Python os.path.sep Examples

The following are 30 code examples showing how to use os.path.sep, the string constant that holds the character the operating system uses to separate pathname components. The examples are extracted from open source projects; the project, author, file, and license are listed above each example.


You may also want to check out all available functions and classes of the os.path module.
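Before the examples, a minimal sketch (not taken from any of the projects below) illustrating that os.path.sep is a plain string constant holding the platform's separator, and that os.path.join is usually preferred over concatenating it by hand:

import os

print(os.path.sep)                 # '/' on POSIX, '\\' on Windows
parts = ["usr", "local", "lib"]    # hypothetical path components
print(os.path.sep.join(parts))     # manual join with the native separator
print(os.path.join(*parts))        # the usual, portable alternative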

Example 1
Project: spleeter   Author: deezer   File: __init__.py    License: MIT License
def get(self, model_directory):
        """ Ensures required model is available at given location.

        :param model_directory: Expected model_directory to be available.
        :raise IOError: If model can not be retrieved.
        """
        # Expand model directory if needed.
        if not isabs(model_directory):
            model_directory = join(self.DEFAULT_MODEL_PATH, model_directory)
        # Download it if it does not exist.
        model_probe = join(model_directory, self.MODEL_PROBE_PATH)
        if not exists(model_probe):
            if not exists(model_directory):
                makedirs(model_directory)
                self.download(
                    model_directory.split(sep)[-1],
                    model_directory)
                self.writeProbe(model_directory)
        return model_directory 
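The model_directory.split(sep)[-1] call above extracts the final path component to use as the model name; a minimal sketch (hypothetical path, built portably) showing that os.path.basename yields the same result here:

from os.path import basename, join, sep

model_directory = join("pretrained_models", "2stems")   # hypothetical path
print(model_directory.split(sep)[-1])                    # -> 2stems
print(basename(model_directory))                         # -> 2stems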
Example 2
Project: imgcomp-cvpr   Author: fab-jul   File: logdir_helpers.py    License: GNU General Public License v3.0
def create_unique_log_dir(config_rel_paths, log_dir_root, line_breaking_chars_pat=r'[-]', restore_dir=None):
    """
    0117_1704 repr@soa3_med_8e*5_deePer_b50_noHM_C16 repr@v2_res_shallow RESTORE@path@to@restore@0115_1340
    :param config_rel_paths:
    :param log_dir_root:
    :param line_breaking_chars_pat:
    :return:
    """
    if any(':' in config_rel_path for config_rel_path in config_rel_paths):
        raise ValueError('":" not allowed in paths, got {}'.format(config_rel_paths))

    def prep_path(p):
        p = p.replace(path.sep, '@')
        return re.sub(line_breaking_chars_pat, '*', p)

    postfix_dir_name = ' '.join(map(prep_path, config_rel_paths))
    if restore_dir:
        restore_dir_root, restore_job_component = _split_log_dir(restore_dir)
        restore_dir_root = restore_dir_root.replace(path.sep, '@')
        restore_job_id = log_date_from_log_dir(restore_job_component)
        postfix_dir_name += ' {restore_prefix}{root}@{job_id}'.format(
                restore_prefix=_RESTORE_PREFIX, root=restore_dir_root, job_id=restore_job_id)
    return _mkdir_threadsafe_unique(log_dir_root, datetime.now(), postfix_dir_name) 
Example 3
Project: imgcomp-cvpr   Author: fab-jul   File: logdir_helpers.py    License: GNU General Public License v3.0
def _split_log_dir(log_dir):
    """
    given
        some/path/to/job/dir/0101_1818 ae_config pc_config/ckpts
    or
        some/path/to/job/dir/0101_1818 ae_config pc_config
    returns
        tuple some/path/to/job/dir, 0101_1818 ae_config pc_config
    """
    log_dir_root = []
    job_component = None

    for comp in log_dir.split(path.sep):
        try:
            log_date_from_log_dir(comp)
            job_component = comp
            break  # this component is an actual log dir. stop and return components
        except ValueError:
            log_dir_root.append(comp)

    assert job_component is not None, 'Invalid log_dir: {}'.format(log_dir)
    return path.sep.join(log_dir_root), job_component 
Example 4
Project: imgcomp-cvpr   Author: fab-jul   File: logdir_helpers.py    License: GNU General Public License v3.0
def config_paths_from_log_dir(log_dir, base_dirs):
    log_dir = path.basename(log_dir.strip(path.sep))

    # log_dir == {now} {netconfig} {probconfig} [RESTORE@some_dir@XXXX_YYYY], get [netconfig, probconfig]
    comps = log_dir.split(' ')
    assert is_log_date(comps[0]), 'Invalid log_dir: {}'.format(log_dir)
    comps = [c for c in comps[1:] if _RESTORE_PREFIX not in c]
    assert len(comps) <= len(base_dirs), 'Expected as many config components as base dirs: {}, {}'.format(
            comps, base_dirs)

    def get_real_path(base, prepped_p):
        p_glob = prepped_p.replace('@', path.sep)
        p_glob = path.join(base, p_glob)  # e.g., ae_configs/p_glob
        glob_matches = glob.glob(p_glob)
        # We always only replace one character with *, so filter for those.
        # I.e. lr1e-5 will become lr1e*5, which will match lr1e-5 but also lr1e-4.5
        glob_matches_of_same_len = [g for g in glob_matches if len(g) == len(p_glob)]
        if len(glob_matches_of_same_len) != 1:
            raise ValueError('Cannot find config on disk: {} (matches: {})'.format(p_glob, glob_matches_of_same_len))
        return glob_matches_of_same_len[0]

    return tuple(get_real_path(base_dir, comp) for base_dir, comp in zip(base_dirs, comps)) 
Example 5
Project: L3C-PyTorch   Author: fab-jul   File: logdir_helpers.py    License: GNU General Public License v3.0
def _split_log_dir(log_dir):
    """
    given
        some/path/to/job/dir/0101_1818 ae_config pc_config/ckpts
    or
        some/path/to/job/dir/0101_1818 ae_config pc_config
    returns
        tuple some/path/to/job/dir, 0101_1818 ae_config pc_config
    """
    log_dir_root = []
    job_component = None

    for comp in log_dir.split(path.sep):
        try:
            log_date_from_log_dir(comp)
            job_component = comp
            break  # this component is an actual log dir. stop and return components
        except ValueError:
            log_dir_root.append(comp)

    assert job_component is not None, 'Invalid log_dir: {}'.format(log_dir)
    return path.sep.join(log_dir_root), job_component 
Example 6
Project: misp42splunk   Author: remg427   File: api_documenter.py    License: GNU Lesser General Public License v3.0
def api_get_spec(context, method_list):
    '''Generates and Returns the spec file data
    :param context: Dictionary with app, session, version and api fields
    :type: ```dict```
    :param method_list: List of API methods to call
    :type: ```list```
    :return: generated spec file
    :rtype: ```basestring```
    '''
    _generate_documentation(context, method_list)
    with open(tempfile.gettempdir() + op.sep + 'spec.yaml') as stream:
        try:
            spec_file = yaml.safe_load(stream)
        except yaml.YAMLError as ex:
            raise Exception("Please try again. Exception: {}".format(ex))
        return json.dumps(spec_file) 
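The tempfile.gettempdir() + op.sep + 'spec.yaml' expression above builds the temp-file path by manual concatenation; a minimal sketch of the os.path.join equivalent (the two agree here assuming gettempdir() returns a path without a trailing separator, which is the usual case):

import os.path as op
import tempfile

manual = tempfile.gettempdir() + op.sep + 'spec.yaml'
joined = op.join(tempfile.gettempdir(), 'spec.yaml')
print(manual)
print(joined)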
Example 7
Project: misp42splunk   Author: remg427   File: api_documenter.py    License: GNU Lesser General Public License v3.0
def write_temp(self):
        '''
        Stores changes to the spec in a temp file.
        '''
        spec = {
            "swagger": self.api.__getattribute__('swagger'),
            "info": self.api.__getattribute__('info'),
            "host": self.api.__getattribute__('host'),
            "schemes": self.api.__getattribute__('schemes'),
            "consumes": self.api.__getattribute__('consumes'),
            "produces": self.api.__getattribute__('produces'),
            "paths": self.api.__getattribute__('paths'),
            "definitions": self.api.__getattribute__('definitions')
        }

        stream = file((tempfile.gettempdir() + op.sep + 'temp.yaml'), 'w')
        for x in self.order:
            yaml.dump({x: spec[x]}, stream, default_flow_style=False) 
Example 8
Project: misp42splunk   Author: remg427   File: api_documenter.py    License: GNU Lesser General Public License v3.0
def update_spec(self):
        '''
        Updates the specification from the temp file.
        '''
        try:
            os.rename(
                tempfile.gettempdir() +
                op.sep +
                'temp.yaml',
                tempfile.gettempdir() +
                op.sep +
                'spec.yaml')
        except Exception as e:
            raise Exception(
                "Spec file not found, please try again."
                " Exception: {}".format(e)) 
Example 9
Project: misp42splunk   Author: remg427   File: fix_absolute_import.py    License: GNU Lesser General Public License v3.0
def probably_a_local_import(self, imp_name):
        """
        Like the corresponding method in the base class, but this also
        supports Cython modules.
        """
        if imp_name.startswith(u"."):
            # Relative imports are certainly not local imports.
            return False
        imp_name = imp_name.split(u".", 1)[0]
        base_path = dirname(self.filename)
        base_path = join(base_path, imp_name)
        # If there is no __init__.py next to the file, it's not in a package
        # so can't be a relative import.
        if not exists(join(dirname(base_path), "__init__.py")):
            return False
        for ext in [".py", sep, ".pyc", ".so", ".sl", ".pyd", ".pyx"]:
            if exists(base_path + ext):
                return True
        return False 
Example 10
Project: misp42splunk   Author: remg427   File: api_documenter.py    License: GNU Lesser General Public License v3.0
def api_get_spec(context, method_list):
    '''Generates and Returns the spec file data
    :param context: Dictionary with app, session, version and api fields
    :type: ```dict```
    :param method_list: List of API methods to call
    :type: ```list```
    :return: generated spec file
    :rtype: ```basestring```
    '''
    _generate_documentation(context, method_list)
    with open(tempfile.gettempdir() + op.sep + 'spec.yaml') as stream:
        try:
            spec_file = yaml.safe_load(stream)
        except yaml.YAMLError as ex:
            raise Exception("Please try again. Exception: {}".format(ex))
        return json.dumps(spec_file) 
Example 11
Project: misp42splunk   Author: remg427   File: api_documenter.py    License: GNU Lesser General Public License v3.0
def write_temp(self):
        '''
        Stores changes to the spec in a temp file.
        '''
        spec = {
            "swagger": self.api.__getattribute__('swagger'),
            "info": self.api.__getattribute__('info'),
            "host": self.api.__getattribute__('host'),
            "schemes": self.api.__getattribute__('schemes'),
            "consumes": self.api.__getattribute__('consumes'),
            "produces": self.api.__getattribute__('produces'),
            "paths": self.api.__getattribute__('paths'),
            "definitions": self.api.__getattribute__('definitions')
        }

        stream = file((tempfile.gettempdir() + op.sep + 'temp.yaml'), 'w')
        for x in self.order:
            yaml.dump({x: spec[x]}, stream, default_flow_style=False) 
Example 12
Project: misp42splunk   Author: remg427   File: api_documenter.py    License: GNU Lesser General Public License v3.0
def update_spec(self):
        '''
        Updates the specification from the temp file.
        '''
        try:
            os.rename(
                tempfile.gettempdir() +
                op.sep +
                'temp.yaml',
                tempfile.gettempdir() +
                op.sep +
                'spec.yaml')
        except Exception as e:
            raise Exception(
                "Spec file not found, please try again."
                " Exception: {}".format(e)) 
Example 13
Project: misp42splunk   Author: remg427   File: fix_absolute_import.py    License: GNU Lesser General Public License v3.0
def probably_a_local_import(self, imp_name):
        """
        Like the corresponding method in the base class, but this also
        supports Cython modules.
        """
        if imp_name.startswith(u"."):
            # Relative imports are certainly not local imports.
            return False
        imp_name = imp_name.split(u".", 1)[0]
        base_path = dirname(self.filename)
        base_path = join(base_path, imp_name)
        # If there is no __init__.py next to the file, it's not in a package
        # so can't be a relative import.
        if not exists(join(dirname(base_path), "__init__.py")):
            return False
        for ext in [".py", sep, ".pyc", ".so", ".sl", ".pyd", ".pyx"]:
            if exists(base_path + ext):
                return True
        return False 
Example 14
Project: thingsboard-gateway   Author: thingsboard   File: test_odbc_connector.py    License: Apache License 2.0
def test_iterator_persistence(self):
        self._create_connector("odbc_iterator.json")
        iterator_file_name = self.connector._OdbcConnector__iterator_file_name

        device_id = 1  # For eval
        data = {"deviceName": eval(self.config["mapping"]["device"]["name"]),
                "deviceType": self.connector._OdbcConnector__connector_type,
                "attributes": [],
                "telemetry": [{"value": 0}]}

        self.gateway.send_to_storage.assert_has_calls([call(self.connector.get_name(), data)])
        self.connector.close()
        sleep(1)

        self.assertTrue(Path(self.CONFIG_PATH + "odbc" + path.sep + iterator_file_name).exists())

        self.connector = OdbcConnector(self.gateway, self.config, "odbc")
        self.connector.open()
        sleep(1)

        data["telemetry"] = [{"value": 5}]
        self.gateway.send_to_storage.assert_has_calls([call(self.connector.get_name(), data)]) 
Example 15
Project: codimension   Author: SergeySatskiy   File: globals.py    License: GNU General Public License v3.0
def getSubdirs(path, baseNamesOnly=True, excludePythonModulesDirs=True):
    """Provides a list of sub directories for the given path"""
    subdirs = []
    try:
        path = realpath(path) + sep
        for item in os.listdir(path):
            candidate = path + item
            if isdir(candidate):
                if excludePythonModulesDirs:
                    modFile = candidate + sep + "__init__.py"
                    if exists(modFile):
                        continue
                if baseNamesOnly:
                    subdirs.append(item)
                else:
                    subdirs.append(candidate)
    except:
        pass
    return subdirs 
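The function above appends sep explicitly (realpath(path) + sep, candidate + sep + "__init__.py") so that plain string concatenation produces valid paths; a minimal sketch (assumed, not part of codimension) of the same scan expressed with os.path.join for the base-names-only case:

import os
from os.path import exists, isdir, join, realpath

def get_subdirs(path, exclude_python_packages=True):
    """Hypothetical helper: sub-directory names, optionally skipping packages."""
    subdirs = []
    try:
        path = realpath(path)
        for item in os.listdir(path):
            candidate = join(path, item)
            if isdir(candidate):
                if exclude_python_packages and exists(join(candidate, "__init__.py")):
                    continue
                subdirs.append(item)
    except OSError:
        pass
    return subdirs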
Example 16
Project: codimension   Author: SergeySatskiy   File: findinfilesdialog.py    License: GNU General Public License v3.0
def __matchInDir(path, filters, startTime):
        """Provides the 'match' and 'too long' statuses"""
        matched = False
        tooLong = False
        for item in listdir(path):
            if time.time() - startTime > 0.1:
                tooLong = True
                return matched, tooLong
            if isdir(path + item):
                dname = path + item + sep
                matched, tooLong = FindInFilesDialog.__matchInDir(dname,
                                                                  filters,
                                                                  startTime)
                if matched or tooLong:
                    return matched, tooLong
                continue
            if FindInFilesDialog.__filterMatch(filters, path + item):
                matched = True
                return matched, tooLong
        return matched, tooLong 
Example 17
Project: codimension   Author: SergeySatskiy   File: findinfilesdialog.py    License: GNU General Public License v3.0
def __projectFiles(self, filters):
        """Project files list respecting the mask"""
        mainWindow = GlobalData().mainWindow
        files = []
        for fname in GlobalData().project.filesList:
            if fname.endswith(sep):
                continue
            if self.__filterMatch(filters, fname):
                widget = mainWindow.getWidgetForFileName(fname)
                if widget is None:
                    # Do not check for broken symlinks
                    if isFileSearchable(fname, False):
                        files.append(ItemToSearchIn(fname, ""))
                else:
                    if widget.getType() in \
                                [MainWindowTabWidgetBase.PlainTextEditor]:
                        files.append(ItemToSearchIn(fname,
                                                    widget.getUUID()))
            QApplication.processEvents()
            if self.__cancelRequest:
                raise Exception("Cancel request")
        return files 
Example 18
Project: pytorch_geometric   Author: rusty1s   File: shapenet.py    License: MIT License
def process_filenames(self, filenames):
        data_list = []
        categories_ids = [self.category_ids[cat] for cat in self.categories]
        cat_idx = {categories_ids[i]: i for i in range(len(categories_ids))}

        for name in filenames:
            cat = name.split(osp.sep)[0]
            if cat not in categories_ids:
                continue

            data = read_txt_array(osp.join(self.raw_dir, name))
            pos = data[:, :3]
            x = data[:, 3:6]
            y = data[:, -1].type(torch.long)
            data = Data(pos=pos, x=x, y=y, category=cat_idx[cat])
            if self.pre_filter is not None and not self.pre_filter(data):
                continue
            if self.pre_transform is not None:
                data = self.pre_transform(data)
            data_list.append(data)

        return data_list 
Example 19
Project: pybids   Author: bids-standard   File: test_transformations.py    License: MIT License
def test_orthogonalize_dense(collection):
    transform.Factor(collection, 'trial_type', sep=sep)

    sampling_rate = collection.sampling_rate
    # Store pre-orth variables needed for tests
    pg_pre = collection['trial_type/parametric gain'].to_dense(sampling_rate)
    rt = collection['RT'].to_dense(sampling_rate)

    # Orthogonalize and store result
    transform.Orthogonalize(collection, variables='trial_type/parametric gain',
                            other='RT', dense=True, groupby=['run', 'subject'])
    pg_post = collection['trial_type/parametric gain']

    # Verify that the to_dense() calls result in identical indexing
    ent_cols = ['subject', 'run']
    assert pg_pre.to_df()[ent_cols].equals(rt.to_df()[ent_cols])
    assert pg_post.to_df()[ent_cols].equals(rt.to_df()[ent_cols])

    vals = np.c_[rt.values, pg_pre.values, pg_post.values]
    df = pd.DataFrame(vals, columns=['rt', 'pre', 'post'])
    groupby = rt.get_grouper(['run', 'subject'])
    pre_r = df.groupby(groupby).apply(lambda x: x.corr().iloc[0, 1])
    post_r = df.groupby(groupby).apply(lambda x: x.corr().iloc[0, 2])
    assert (pre_r > 0.2).any()
    assert (post_r < 0.0001).all() 
Example 20
Project: pytgbot   Author: luckydonald   File: code_generator_online.py    License: GNU General Public License v3.0
def calc_path_and_create_folders(folder, import_path, create_folder=True):
    """
    calculate the path and create the needed folders

    >>> calc_path_and_create_folders(folder='/somewhere/', import_path='foo.bar.BarClass', create_folder=False)
    '/somewhere/foo/bar/BarClass'

    :param import_path:  'foo.bar.BarClass'
    :param folder: base folder where we wanna place 'foo.bar.BarClass' in.
     """
    file_path = abspath(path_join(folder, import_path[:import_path.rfind(".")].replace(".", folder_seperator) + ".py"))
    if create_folder:
        mkdir_p(dirname(file_path))
    # end if
    return file_path
# end def 
Example 21
Project: airflow   Author: apache   File: build_provider_packages_dependencies.py    License: Apache License 2.0
def get_provider_from_file_name(file_name: str) -> Optional[str]:
    """
    Retrieves provider name from file name
    :param file_name: name of the file
    :return: provider name or None if no provider could be found
    """
    if AIRFLOW_PROVIDERS_FILE_PREFIX not in file_name and \
            AIRFLOW_TESTS_PROVIDERS_FILE_PREFIX not in file_name:
        # We should only check file that are provider
        errors.append(f"Wrong file not in the providers package = {file_name}")
        return None
    suffix = get_file_suffix(file_name)
    split_path = suffix.split(sep)[2:]
    provider = find_provider(split_path)
    if not provider and file_name.endswith("__init__.py"):
        infos.append(f"Skipped file = {file_name}")
    elif not provider:
        warnings.append(f"Provider not found for path = {file_name}")
    return provider 
Example 22
Project: deepWordBug   Author: QData   File: fix_absolute_import.py    License: Apache License 2.0
def probably_a_local_import(self, imp_name):
        """
        Like the corresponding method in the base class, but this also
        supports Cython modules.
        """
        if imp_name.startswith(u"."):
            # Relative imports are certainly not local imports.
            return False
        imp_name = imp_name.split(u".", 1)[0]
        base_path = dirname(self.filename)
        base_path = join(base_path, imp_name)
        # If there is no __init__.py next to the file, it's not in a package
        # so can't be a relative import.
        if not exists(join(dirname(base_path), "__init__.py")):
            return False
        for ext in [".py", sep, ".pyc", ".so", ".sl", ".pyd", ".pyx"]:
            if exists(base_path + ext):
                return True
        return False 
Example 23
Project: pipenv   Author: pypa   File: loaders.py    License: MIT License
def list_templates(self):
        found = set()
        for searchpath in self.searchpath:
            walk_dir = os.walk(searchpath, followlinks=self.followlinks)
            for dirpath, _, filenames in walk_dir:
                for filename in filenames:
                    template = (
                        os.path.join(dirpath, filename)[len(searchpath) :]
                        .strip(os.path.sep)
                        .replace(os.path.sep, "/")
                    )
                    if template[:2] == "./":
                        template = template[2:]
                    if template not in found:
                        found.add(template)
        return sorted(found) 
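The chained .strip(os.path.sep).replace(os.path.sep, "/") above turns a filesystem path relative to the search path into a forward-slash template name; a minimal sketch of that normalization in isolation (hypothetical paths):

import os

searchpath = "templates"                                   # hypothetical search root
full = os.path.join(searchpath, "emails", "welcome.html")  # hypothetical file path
template = full[len(searchpath):].strip(os.path.sep).replace(os.path.sep, "/")
print(template)   # -> emails/welcome.html on both POSIX and Windows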
Example 24
Project: kgsgo-dataset-preprocessor   Author: hughperkins   File: fix_absolute_import.py    License: Mozilla Public License 2.0
def probably_a_local_import(self, imp_name):
        """
        Like the corresponding method in the base class, but this also
        supports Cython modules.
        """
        if imp_name.startswith(u"."):
            # Relative imports are certainly not local imports.
            return False
        imp_name = imp_name.split(u".", 1)[0]
        base_path = dirname(self.filename)
        base_path = join(base_path, imp_name)
        # If there is no __init__.py next to the file, it's not in a package
        # so can't be a relative import.
        if not exists(join(dirname(base_path), "__init__.py")):
            return False
        for ext in [".py", sep, ".pyc", ".so", ".sl", ".pyd", ".pyx"]:
            if exists(base_path + ext):
                return True
        return False 
Example 25
Project: spleeter   Author: deezer   File: dataset.py    License: MIT License
def expand_path(self, sample):
        """ Expands audio paths for the given sample. """
        return dict(sample, **{f'{instrument}_path': tf.string_join(
            (self._audio_path, sample[f'{instrument}_path']), SEPARATOR)
            for instrument in self._instruments}) 
Example 26
Project: delocate   Author: matthew-brett   File: delocate_listdeps.py    License: BSD 2-Clause "Simplified" License
def main():
    parser = OptionParser(
        usage="%s WHEEL_OR_PATH_TO_ANALYZE\n\n" % sys.argv[0] + __doc__,
        version="%prog " + __version__)
    parser.add_options([
        Option("-a", "--all",
               action="store_true",
               help="Show all dependencies, including system libs"),
        Option("-d", "--depending",
               action="store_true",
               help="Show libraries depending on dependencies")])
    (opts, paths) = parser.parse_args()
    if len(paths) < 1:
        parser.print_help()
        sys.exit(1)

    multi = len(paths) > 1
    for path in paths:
        if multi:
            print(path + ':')
            indent = '   '
        else:
            indent = ''
        if isdir(path):
            lib_dict = tree_libs(path)
            lib_dict = stripped_lib_dict(lib_dict, realpath(getcwd()) + psep)
        else:
            lib_dict = wheel_libs(path)
        keys = sorted(lib_dict)
        if not opts.all:
            keys = [key for key in keys if filter_system_libs(key)]
        if not opts.depending:
            if len(keys):
                print(indent + ('\n' + indent).join(keys))
            continue
        i2 = indent + '    '
        for key in keys:
            print(indent + key + ':')
            libs = lib_dict[key]
            if len(libs):
                print(i2 + ('\n' + i2).join(libs)) 
Example 27
Project: mealpy   Author: edmundmok   File: venv_update.py    License: MIT License
def timid_relpath(arg):
    """convert an argument to a relative path, carefully"""
    # TODO-TEST: unit tests
    from os.path import isabs, relpath, sep
    if isabs(arg):
        result = relpath(arg)
        if result.count(sep) + 1 < arg.count(sep):
            return result

    return arg 
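The count(sep) comparison above is a depth heuristic: the relative form is kept only when it contains at least two fewer separators than the absolute argument. A minimal sketch of the counting with hypothetical paths:

from os.path import join, sep

absolute = sep + join("home", "user", "project", "src", "main.py")  # hypothetical
relative = join("src", "main.py")
print(absolute.count(sep), relative.count(sep))        # e.g. 5 and 1 on POSIX
print(relative.count(sep) + 1 < absolute.count(sep))   # True: the relative path would be kept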
Example 28
Project: calmjs   Author: calmjs   File: base.py    License: GNU General Public License v2.0
def which_with_node_modules(self):
        """
        Which with node_path and node_modules
        """

        if self.binary is None:
            return None

        # first, log down the pedantic things...
        if isdir(self.join_cwd(NODE_MODULES)):
            logger.debug(
                "'%s' instance will attempt to locate '%s' binary from "
                "%s%s%s%s%s, located through the working directory",
                self.__class__.__name__, self.binary,
                self.join_cwd(), sep, NODE_MODULES, sep, NODE_MODULES_BIN,
            )
        if self.node_path:
            logger.debug(
                "'%s' instance will attempt to locate '%s' binary from "
                "its %s of %s",
                self.__class__.__name__, self.binary,
                NODE_PATH, self.node_path,
            )

        paths = self.find_node_modules_basedir()
        whichpaths = pathsep.join(join(p, NODE_MODULES_BIN) for p in paths)

        if paths:
            logger.debug(
                "'%s' instance located %d possible paths to the '%s' binary, "
                "which are %s",
                self.__class__.__name__, len(paths), self.binary, whichpaths,
            )

        return which(self.binary, path=whichpaths) 
Example 29
Project: imgcomp-cvpr   Author: fab-jul   File: val_images.py    License: GNU General Public License v3.0
def get_path_component_before_glob(p):
    """ Given some path ending in one or more components containing *, return the left-most non-empty component not
    containig *, e.g., /some/path/dir/*/*/*.png => dir """
    for comp in reversed(p.strip(path.sep).split(path.sep)):
        if '*' not in comp:
            return comp
    raise ValueError('No component without *: {}'.format(p)) 
Example 30
Project: L3C-PyTorch   Author: fab-jul   File: logdir_helpers.py    License: GNU General Public License v3.0
def create_unique_log_dir(config_rel_paths, log_dir_root, line_breaking_chars_pat=r'[-]',
                          postfix=None, restore_dir=None, strip_ext=None):
    """
    0117_1704 repr@soa3_med_8e*5_deePer_b50_noHM_C16 repr@v2_res_shallow r@0115_1340
    :param config_rel_paths: paths to the configs, relative to the config root dir
    :param log_dir_root: In this directory, all log dirs are stored. Created if needed.
    :param line_breaking_chars_pat:
    :param postfix: appended to the returned log dir
    :param restore_dir: if given, expected to be a log dir. the JOB_ID of that will be appended
    :param strip_ext: if given, do not store extension `strip_ext` of config_rel_paths
    :return: path to a newly created directory
    """
    if any('@' in config_rel_path for config_rel_path in config_rel_paths):
        raise ValueError('"@" not allowed in paths, got {}'.format(config_rel_paths))

    if strip_ext:
        assert all(strip_ext in c for c in config_rel_paths)
        config_rel_paths = [c.replace(strip_ext, '') for c in config_rel_paths]

    def prep_path(p):
        p = p.replace(path.sep, '@')
        return re.sub(line_breaking_chars_pat, '*', p)

    postfix_dir_name = ' '.join(map(prep_path, config_rel_paths))
    if restore_dir:
        _, restore_job_component = _split_log_dir(restore_dir)
        restore_job_id = log_date_from_log_dir(restore_job_component)
        postfix_dir_name += ' {restore_prefix}{job_id}'.format(
                restore_prefix=_RESTORE_PREFIX, job_id=restore_job_id)
    if postfix:
        if isinstance(postfix, list):
            postfix = ' '.join(postfix)
        postfix_dir_name += ' ' + postfix
    return _mkdir_threadsafe_unique(log_dir_root, datetime.now(), postfix_dir_name)