Python os.path.dirname() Examples

The following are 30 code examples of os.path.dirname(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module os.path, or try the search function.
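Before the project examples, here is a minimal standalone sketch (the paths are made up for illustration) of what os.path.dirname() actually does: it returns the directory portion of a path, i.e. everything before the last path separator, or an empty string when the path has no directory component.

import os.path

print(os.path.dirname("/usr/local/lib/python3.8/os.py"))  # /usr/local/lib/python3.8
print(os.path.dirname("/usr/local/lib/python3.8/"))       # /usr/local/lib/python3.8 (only the trailing slash is dropped)
print(os.path.dirname("setup.py"))                        # '' (no directory component)

# The idiom that recurs throughout the examples below: the directory containing
# the current module, and the project root one level above it.
here = os.path.dirname(os.path.abspath(__file__))
project_root = os.path.dirname(here)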
Example #1
Source File: test_pep8.py    From hydrus with MIT License 8 votes
def test_pep8(self):
        """Test method to check PEP8 compliance over the entire project."""
        self.file_structure = dirname(dirname(abspath(__file__)))
        print("Testing for PEP8 compliance of python files in {}".format(
            self.file_structure))
        style = pep8.StyleGuide()
        style.options.max_line_length = 100  # Set this to desired maximum line length
        filenames = []
        # Walk the target folder and collect the Python files to check
        for root, _, files in os.walk(self.file_structure):
            python_files = [f for f in files
                            if f.endswith('.py') and "examples" not in root]
            for file in python_files:
                if len(root.split('samples')) != 2:     # Ignore samples directory
                    filename = '{0}/{1}'.format(root, file)
                    filenames.append(filename)
        check = style.check_files(filenames)
        self.assertEqual(check.total_errors, 0, 'PEP8 style errors: %d' %
                         check.total_errors) 
Example #2
Source File: __init__.py    From ALF with Apache License 2.0 6 votes
def import_helper():
    from os.path import dirname
    import imp
    possible_libs = ["_alf_grammar.win32",
                     "_alf_grammar.ntoarm",
                     "_alf_grammar.ntox86",
                     "_alf_grammar.linux"]
    found_lib = False
    for i in possible_libs:
        fp = None
        try:
            fp, pathname, description = imp.find_module(i, [dirname(__file__)])
            _mod = imp.load_module("_alf_grammar", fp, pathname, description)
            found_lib = True
            break
        except ImportError:
            pass
        finally:
            if fp:
                fp.close()
    if not found_lib:
        raise ImportError("Failed to load _alf_grammar module")
    return _mod 
Example #3
Source File: venv_update.py    From mealpy with MIT License 6 votes
def has_system_site_packages(interpreter):
    # TODO: unit-test
    system_site_packages = check_output((
        interpreter,
        '-c',
        # stolen directly from virtualenv's site.py
        """\
import site, os.path
print(
    0
    if os.path.exists(
        os.path.join(os.path.dirname(site.__file__), 'no-global-site-packages.txt')
    ) else
    1
)"""
    ))
    system_site_packages = int(system_site_packages)
    assert system_site_packages in (0, 1)
    return bool(system_site_packages) 
Example #4
Source File: toolchain.py    From calmjs with GNU General Public License v2.0 6 votes
def compile_bundle_entry(self, spec, entry):
        """
        Handler for each entry for the bundle method of the compile
        process.  This copies the source file or directory into the
        build directory.
        """

        modname, source, target, modpath = entry
        bundled_modpath = {modname: modpath}
        bundled_target = {modname: target}
        export_module_name = []
        if isfile(source):
            export_module_name.append(modname)
            copy_target = join(spec[BUILD_DIR], target)
            if not exists(dirname(copy_target)):
                makedirs(dirname(copy_target))
            shutil.copy(source, copy_target)
        elif isdir(source):
            copy_target = join(spec[BUILD_DIR], modname)
            shutil.copytree(source, copy_target)

        return bundled_modpath, bundled_target, export_module_name 
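The exists()/makedirs() pair in compile_bundle_entry() can race if two processes create the same directory at the same time. A minimal sketch of the same idea on Python 3, where the check is folded into a single call (copy_into_build is a hypothetical helper, not part of calmjs):

import os
import shutil
from os.path import dirname, join

def copy_into_build(source, build_dir, target):
    # Ensure the parent directory of the target exists, then copy the file.
    copy_target = join(build_dir, target)
    os.makedirs(dirname(copy_target), exist_ok=True)  # no exists() check needed
    shutil.copy(source, copy_target)
    return copy_target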
Example #5
Source File: test_artifact.py    From calmjs with GNU General Public License v2.0 6 votes
def test_existing_removed(self):
        # force an existing file
        target = self.registry.records[('app', 'nothing.js')]
        os.mkdir(dirname(target))
        with open(target, 'w'):
            pass

        with pretty_logging(stream=mocks.StringIO()) as stream:
            self.registry.process_package('app')

        log = stream.getvalue()
        self.assertIn(
            "package 'app' has declared 3 entry points for the "
            "'calmjs.artifacts' registry for artifact construction", log
        )
        log = stream.getvalue()
        self.assertIn("removing existing export target at ", log)
        self.assertFalse(exists(target)) 
Example #6
Source File: mx_javamodules.py    From mx with GNU General Public License v2.0 6 votes
def get_jmod_path(self, respect_stripping=True, alt_module_info_name=None):
        """
        Gets the path to the .jmod file corresponding to this module descriptor.

        :param bool respect_stripping: Specifies whether or not to return a path
               to a stripped .jmod file if this module is based on a dist
        """
        if respect_stripping and self.dist is not None:
            assert alt_module_info_name is None, 'alternate modules not supported for stripped dist ' + self.dist.name
            return join(dirname(self.dist.path), self.name + '.jmod')
        if self.dist is not None:
            qualifier = '_' + alt_module_info_name if alt_module_info_name else ''
            return join(dirname(self.dist.original_path()), self.name + qualifier + '.jmod')
        if self.jarpath:
            return join(dirname(self.jarpath), self.name + '.jmod')
        assert self.jdk, self.name
        p = join(self.jdk.home, 'jmods', self.name + '.jmod')
        assert exists(p), p
        return p 
Example #7
Source File: __init__.py    From aws-ops-automator with Apache License 2.0 6 votes
def all_handlers():
    global __actions
    if __actions is None:
        __actions = []
        current = abspath(os.getcwd())
        while True:
            if isdir(os.path.join(current, "handlers")):
                break
            parent = dirname(current)
            if parent == current:
                # at top level
                raise Exception("Could not find handlers directory")
            else:
                current = parent

        for f in listdir(os.path.join(current, "handlers")):
            if isfile(join(current, "handlers", f)) and f.endswith("_{}.py".format(HANDLER.lower())):
                module_name = HANDLERS_MODULE_NAME.format(f[0:-len(".py")])
                m = _get_module(module_name)
                cls = _get_handler_class(m)
                if cls is not None:
                    handler_name = cls[0]
                    __actions.append(handler_name)
    return __actions 
Example #8
Source File: test_0091_tddft_x_zip_na20.py    From pyscf with Apache License 2.0 6 votes
def test_x_zip_feature_na20_chain(self):
    """ This a test for compression of the eigenvectos at higher energies """
    dname = dirname(abspath(__file__))
    siesd = dname+'/sodium_20'
    x = td_c(label='siesta', cd=siesd, x_zip=True, x_zip_emax=0.25, x_zip_eps=0.05,
             jcutoff=7, xc_code='RPA', nr=128, fermi_energy=-0.0913346431431985)
    
    eps = 0.005
    ww = np.arange(0.0, 0.5, eps/2.0)+1j*eps
    data = np.array([ww.real*27.2114, -x.comp_polariz_inter_ave(ww).imag])
    fname = 'na20_chain.tddft_iter_rpa.omega.inter.ave.x_zip.txt'
    np.savetxt(fname, data.T, fmt=['%f','%f'])
    #print(__file__, fname)
    data_ref = np.loadtxt(dname+'/'+fname+'-ref')
    #print('    x.rf0_ncalls ', x.rf0_ncalls)
    #print(' x.matvec_ncalls ', x.matvec_ncalls)
    self.assertTrue(np.allclose(data_ref,data.T, rtol=1.0e-1, atol=1e-06)) 
Example #9
Source File: test_0004_vna.py    From pyscf with Apache License 2.0 6 votes
def test_vna_lih(self):
    dname = dirname(abspath(__file__))
    n = nao(label='lih', cd=dname)
    m = 200
    dvec,midv = 2*(n.atom2coord[1] - n.atom2coord[0])/m,  (n.atom2coord[1] + n.atom2coord[0])/2.0
    vgrid = np.tensordot(np.array(range(-m,m+1)), dvec, axes=0) + midv
    sgrid = np.array(range(-m,m+1)) * np.sqrt((dvec*dvec).sum())
    
    
    #vgrid = np.array([[-1.517908564663352e+00, 1.180550033093826e+00,0.000000000000000e+00]])
    vna = n.vna(vgrid)
    
    #for v,r in zip(vna,vgrid):
    #  print("%23.15e %23.15e %23.15e %23.15e"%(r[0], r[1], r[2], v))
    
    #print(vna.shape, sgrid.shape)
    np.savetxt('vna_lih_0004.txt', np.row_stack((sgrid, vna)).T)
    ref = np.loadtxt(dname+'/vna_lih_0004.txt-ref')
    for r,d in zip(ref[:,1],vna): self.assertAlmostEqual(r,d) 
Example #10
Source File: mdbt.py    From ConvLab with MIT License 6 votes
def cached_path(file_path, cached_dir=None):
    if not cached_dir:
        cached_dir = str(Path(Path.home() / '.tatk') / "cache")

    return allennlp_cached_path(file_path, cached_dir)

# DATA_PATH = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))), 'data/mdbt')
# VALIDATION_URL = os.path.join(DATA_PATH, "data/validate.json")
# WORD_VECTORS_URL = os.path.join(DATA_PATH, "word-vectors/paragram_300_sl999.txt")
# TRAINING_URL = os.path.join(DATA_PATH, "data/train.json")
# ONTOLOGY_URL = os.path.join(DATA_PATH, "data/ontology.json")
# TESTING_URL = os.path.join(DATA_PATH, "data/test.json")
# MODEL_URL = os.path.join(DATA_PATH, "models/model-1")
# GRAPH_URL = os.path.join(DATA_PATH, "graphs/graph-1")
# RESULTS_URL = os.path.join(DATA_PATH, "results/log-1.txt")
# KB_URL = os.path.join(DATA_PATH, "data/")  # TODO: yaoqin
# TRAIN_MODEL_URL = os.path.join(DATA_PATH, "train_models/model-1")
# TRAIN_GRAPH_URL = os.path.join(DATA_PATH, "train_graph/graph-1") 
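The commented-out DATA_PATH above climbs five directory levels by nesting os.path.dirname() calls. A minimal equivalent sketch using pathlib, assuming the same directory layout ('data/mdbt' is taken from the comment):

import os
from pathlib import Path

# parents[0] is the file's own directory, so parents[4] is five levels up,
# matching five nested os.path.dirname() calls on the absolute path.
DATA_PATH = os.path.join(str(Path(__file__).resolve().parents[4]), 'data/mdbt')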
Example #11
Source File: domainUtil.py    From hsds with Apache License 2.0 6 votes
def getParentDomain(domain):
    """Get parent domain of given domain.
    E.g. getParentDomain("www.hdfgroup.org") returns "hdfgroup.org"
    Return None if the given domain is already a top-level domain.
    """
    if domain.endswith(DOMAIN_SUFFIX):
        n = len(DOMAIN_SUFFIX) - 1
        domain = domain[:-n]

    bucket = getBucketForDomain(domain)
    domain_path = getPathForDomain(domain)
    if len(domain_path) > 1 and domain_path[-1] == '/':
        domain_path = domain_path[:-1]
    dirname = op.dirname(domain_path)
    if bucket:
        parent = bucket + dirname
    else:
        parent = dirname


    if not parent:
        parent = None
    return parent 
Example #12
Source File: domainUtil.py    From hsds with Apache License 2.0 6 votes
def getParentDomain(domain):
    """Get parent domain of given domain.
    E.g. getParentDomain("www.hdfgroup.org") returns "hdfgroup.org"
    Return None if the given domain is already a top-level domain.
    """
    if domain.endswith(DOMAIN_SUFFIX):
        n = len(DOMAIN_SUFFIX) - 1
        domain = domain[:-n]

    bucket = getBucketForDomain(domain)
    domain_path = getPathForDomain(domain)
    if len(domain_path) > 1 and domain_path[-1] == '/':
        domain_path = domain_path[:-1]
    dirname = op.dirname(domain_path)
    if bucket:
        parent = bucket + dirname
    else:
        parent = dirname


    if not parent:
        parent = None
    return parent 
Example #13
Source File: ta_data_loader.py    From misp42splunk with GNU Lesser General Public License v3.0 6 votes
def _read_default_settings():
        cur_dir = op.dirname(op.abspath(__file__))
        setting_file = op.join(cur_dir, "../../", "splunktalib", "setting.conf")
        parser = configparser.ConfigParser()
        parser.read(setting_file)
        settings = {}
        keys = ("process_size", "thread_min_size", "thread_max_size",
                "task_queue_size")
        for option in keys:
            try:
                settings[option] = parser.get("global", option)
            except configparser.NoOptionError:
                settings[option] = -1

            try:
                settings[option] = int(settings[option])
            except ValueError:
                settings[option] = -1
        log.logger.debug("settings: %s", settings)
        return settings 
Example #14
Source File: fix_absolute_import.py    From misp42splunk with GNU Lesser General Public License v3.0 6 votes
def probably_a_local_import(self, imp_name):
        """
        Like the corresponding method in the base class, but this also
        supports Cython modules.
        """
        if imp_name.startswith(u"."):
            # Relative imports are certainly not local imports.
            return False
        imp_name = imp_name.split(u".", 1)[0]
        base_path = dirname(self.filename)
        base_path = join(base_path, imp_name)
        # If there is no __init__.py next to the file, it's not in a package,
        # so it can't be a local import.
        if not exists(join(dirname(base_path), "__init__.py")):
            return False
        for ext in [".py", sep, ".pyc", ".so", ".sl", ".pyd", ".pyx"]:
            if exists(base_path + ext):
                return True
        return False 
Example #15
Source File: ta_data_loader.py    From misp42splunk with GNU Lesser General Public License v3.0 6 votes
def _read_default_settings():
        cur_dir = op.dirname(op.abspath(__file__))
        setting_file = op.join(cur_dir, "../../", "splunktalib", "setting.conf")
        parser = configparser.ConfigParser()
        parser.read(setting_file)
        settings = {}
        keys = ("process_size", "thread_min_size", "thread_max_size",
                "task_queue_size")
        for option in keys:
            try:
                settings[option] = parser.get("global", option)
            except configparser.NoOptionError:
                settings[option] = -1

            try:
                settings[option] = int(settings[option])
            except ValueError:
                settings[option] = -1
        log.logger.debug("settings: %s", settings)
        return settings 
Example #16
Source File: fix_absolute_import.py    From misp42splunk with GNU Lesser General Public License v3.0 6 votes
def probably_a_local_import(self, imp_name):
        """
        Like the corresponding method in the base class, but this also
        supports Cython modules.
        """
        if imp_name.startswith(u"."):
            # Relative imports are certainly not local imports.
            return False
        imp_name = imp_name.split(u".", 1)[0]
        base_path = dirname(self.filename)
        base_path = join(base_path, imp_name)
        # If there is no __init__.py next to the file, it's not in a package,
        # so it can't be a local import.
        if not exists(join(dirname(base_path), "__init__.py")):
            return False
        for ext in [".py", sep, ".pyc", ".so", ".sl", ".pyd", ".pyx"]:
            if exists(base_path + ext):
                return True
        return False 
Example #17
Source File: setup.py    From libTLDA with MIT License 5 votes
def read(fname):
    """Read filename"""
    return open(os.path.join(os.path.dirname(__file__), fname)).read() 
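Example #17 shows the classic setup.py idiom of reading a file that sits next to the setup script; note that the one-liner never closes the file handle. A minimal variant sketch that does (the utf-8 encoding is an assumption):

import os

def read(fname):
    # Resolve fname relative to the directory containing this setup.py
    # and close the file handle when done.
    here = os.path.join(os.path.dirname(os.path.abspath(__file__)), fname)
    with open(here, encoding='utf-8') as f:
        return f.read()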
Example #18
Source File: setup.py    From BiblioPixelAnimations with MIT License 5 votes
def _get_version():
    from os.path import abspath, dirname, join
    filename = join(dirname(abspath(__file__)), 'VERSION')
    print('Reading version from {}'.format(filename))
    version = open(filename).read().strip()
    print('Version: {}'.format(version))
    return version 
Example #19
Source File: test_sampler.py    From mmdetection with Apache License 2.0 5 votes
def _context_for_ohem():
    import sys
    from os.path import dirname
    sys.path.insert(0, dirname(dirname(dirname(__file__))))
    from test_forward import _get_detector_cfg

    model, train_cfg, test_cfg = _get_detector_cfg(
        'faster_rcnn/faster_rcnn_r50_fpn_ohem_1x_coco.py')
    model['pretrained'] = None

    from mmdet.models import build_detector
    context = build_detector(
        model, train_cfg=train_cfg, test_cfg=test_cfg).roi_head
    return context 
Example #20
Source File: test_config.py    From mmdetection with Apache License 2.0 5 votes
def _get_config_directory():
    """Find the predefined detector config directory."""
    try:
        # Assume we are running in the source mmdetection repo
        repo_dpath = dirname(dirname(__file__))
    except NameError:
        # For IPython development when this __file__ is not defined
        import mmdet
        repo_dpath = dirname(dirname(mmdet.__file__))
    config_dpath = join(repo_dpath, 'configs')
    if not exists(config_dpath):
        raise Exception('Cannot find config path')
    return config_dpath 
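A subtlety in the try branch above: if __file__ is a relative path (e.g. just 'test_config.py' when the module is located via a relative path), the doubled dirname() collapses to an empty string and the computed config path becomes relative to the current working directory. A minimal, more defensive variant sketch:

from os.path import abspath, dirname, exists, join

# Resolving to an absolute path first guarantees dirname() always has a
# directory component to strip, even if __file__ is just 'test_config.py'.
repo_dpath = dirname(dirname(abspath(__file__)))
config_dpath = join(repo_dpath, 'configs')
if not exists(config_dpath):
    raise Exception('Cannot find config path')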
Example #21
Source File: test_forward.py    From mmdetection with Apache License 2.0 5 votes
def _get_config_directory():
    """Find the predefined detector config directory."""
    try:
        # Assume we are running in the source mmdetection repo
        repo_dpath = dirname(dirname(dirname(__file__)))
    except NameError:
        # For IPython development when this __file__ is not defined
        import mmdet
        repo_dpath = dirname(dirname(mmdet.__file__))
    config_dpath = join(repo_dpath, 'configs')
    if not exists(config_dpath):
        raise Exception('Cannot find config path')
    return config_dpath 
Example #22
Source File: bids.py    From NiBetaSeries with MIT License 5 votes
def _run_interface(self, runtime):
        import json
        import os.path as op
        import pkg_resources
        from bids.layout import parse_file_entities
        from bids.layout.writing import build_path

        deriv_cfg = pkg_resources.resource_string("nibetaseries",
                                                  op.join("data", "derivatives.json"))
        deriv_patterns = json.loads(deriv_cfg.decode('utf-8'))['fmriprep_path_patterns']

        subject_entities = parse_file_entities(self.inputs.source_file)
        betaseries_entities = parse_file_entities(self.inputs.in_file)
        # hotfix
        betaseries_entities['description'] = betaseries_entities['desc']

        subject_entities.update(betaseries_entities)

        out_file = build_path(subject_entities, deriv_patterns)

        if not out_file:
            raise ValueError("the provided entities do not make a valid file")

        base_directory = runtime.cwd
        if isdefined(self.inputs.base_directory):
            base_directory = os.path.abspath(self.inputs.base_directory)

        out_path = op.join(base_directory, self.out_path_base, out_file)

        os.makedirs(op.dirname(out_path), exist_ok=True)

        # copy the file to the output directory
        copy(self.inputs.in_file, out_path)

        self._results['out_file'] = out_path

        return runtime 
Example #23
Source File: conftest.py    From NiBetaSeries with MIT License 5 votes
def sub_bids(bids_dir, example_file=bids_bold_fname):
    sub_dir = op.dirname(example_file)

    return bids_dir.ensure(sub_dir,
                           dir=True) 
Example #24
Source File: conftest.py    From NiBetaSeries with MIT License 5 votes
def sub_fmriprep(deriv_dir, example_file=deriv_bold_fname):
    sub_dir = op.dirname(example_file)

    return deriv_dir.ensure(sub_dir,
                            dir=True) 
Example #25
Source File: setup.py    From tvdbsimple with GNU General Public License v3.0 5 votes
def read(fname):

    here = path.join(path.abspath(path.dirname(__file__)), fname)
    txt = ''
    if path.isfile(here):
        # Get the long description from the README file
        with open(here, encoding='utf-8') as f:
            txt = f.read()
    return txt 
Example #26
Source File: run.py    From find_forks with MIT License 5 votes
def main():
    """Main function to run as shell script."""
    loader = unittest.TestLoader()
    suite = loader.discover(path.abspath(path.dirname(__file__)), pattern='test_*.py')
    runner = unittest.TextTestRunner(buffer=True)
    runner.run(suite) 
Example #27
Source File: clean.py    From alibuild with GNU General Public License v3.0 5 votes
def decideClean(workDir, architecture, aggressiveCleanup):
  """ Decides what to delete, without actually doing it:
      - Find all the symlinks in "BUILD"
      - Find all the directories in "BUILD"
      - Schedule a directory for deletion if it does not have a symlink
  """
  symlinksBuild = [os.readlink(x) for x in glob.glob("%s/BUILD/*-latest*" % workDir)]
  # $WORK_DIR/TMP should always be cleaned up. This does not happen only
  # in the case we run out of space while unpacking.
  # $WORK_DIR/<architecture>/store can be cleaned up as well, because
  # we do not need the actual tarballs after they have been built.
  toDelete = ["%s/TMP" % workDir]
  if aggressiveCleanup:
    toDelete += ["%s/TARS/%s/store" % (workDir, architecture),
                 "%s/SOURCES" % (workDir)]
  allBuildStuff = glob.glob("%s/BUILD/*" % workDir)
  toDelete += [x for x in allBuildStuff
               if not path.islink(x) and not basename(x) in symlinksBuild]
  installGlob = "%s/%s/*/" % (workDir, architecture)
  installedPackages = set([dirname(x) for x in glob.glob(installGlob)])
  symlinksInstall = []
  for x in installedPackages:
    symlinksInstall += [path.realpath(y) for y in glob.glob(x + "/latest*")]
  toDelete += [x for x in glob.glob(installGlob + "*")
               if not path.islink(x) and not path.realpath(x) in symlinksInstall]
  toDelete = [x for x in toDelete if path.exists(x)]
  return toDelete 
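One non-obvious use of dirname() in decideClean(): the install glob ends in '/', so glob() matches only directories and every match carries a trailing slash, and dirname() is used simply to strip that trailing slash again. A tiny illustration with a made-up path:

from os.path import dirname

# What glob("<workDir>/<architecture>/*/") might return for one installed package:
match = "/work/slc7_x86-64/zlib/"
print(dirname(match))  # /work/slc7_x86-64/zlib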
Example #28
Source File: utilities.py    From alibuild with GNU General Public License v3.0 5 votes
def getVersion():
  try:
    import pkg_resources  # part of setuptools
    return pkg_resources.require("alibuild")[0].version
  except:
    cmd = "GIT_DIR=\'%s/.git\' git describe --tags" % dirname(dirname(__file__))
    err, version = getstatusoutput(cmd)
    return version if not err else "Unknown version." 
Example #29
Source File: toolchain.py    From calmjs with GNU General Public License v2.0 5 votes
def _generate_transpile_target(self, spec, target):
        # ensure that the target is fully normalized.
        bd_target = join(spec[BUILD_DIR], normpath(target))
        self._validate_build_target(spec, bd_target)
        if not exists(dirname(bd_target)):
            logger.debug("creating dir '%s'", dirname(bd_target))
            makedirs(dirname(bd_target))

        return bd_target 
Example #30
Source File: test_artifact.py    From calmjs with GNU General Public License v2.0 5 votes
def test_grandparent_not_removed(self):
        with open(dirname(self.registry.records[('bad', 'bad.js')]), 'w'):
            pass

        with self.assertRaises(ToolchainAbort):
            with pretty_logging(stream=mocks.StringIO()) as stream:
                self.registry.process_package('bad')

        log = stream.getvalue()
        self.assertIn("its dirname does not lead to a directory", log)