Python os.path.basename() Examples

The following are 30 code examples showing how to use os.path.basename(). They are extracted from open source projects; the project, author, source file, and license are listed above each example.

You may also want to check out all available functions and classes of the os.path module.
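
Before the project examples, here is a minimal sketch of what os.path.basename() returns and the splitext() pattern that several of the examples below rely on. The paths are made-up illustrations, not files from any of the listed projects.

from os import path

print(path.basename('/usr/local/bin/python3'))   # -> 'python3' (last path component)
print(path.basename('/usr/local/bin/'))          # -> '' (empty when the path ends with a separator)

filename = path.basename('/data/logs/report.csv')   # hypothetical path
name = path.splitext(filename)[0]                   # strip the extension, as many examples below do
print(filename, name)                                # -> report.csv report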

Example 1
Project: incubator-spot   Author: apache   File: file_watcher.py    License: Apache License 2.0
def detect(self, newfile):
        '''
            Called when a new file is generated under the monitoring directory.

        :param newfile: Path to file created recently.
        '''
        self._logger.info(' -------- New File Detected! -------- ')

        filename = basename(newfile)
        # .............................check whether the filename is in the supported list
        if any([x.search(filename) for x in self._regexs]) or not self._regexs:
            self._queue.insert(0, newfile)
            self._logger.info('File "{0}" added to the queue.'.format(newfile))
            return

        self._logger.warning('Filename "%s" is not supported! Skip file...' % filename) 
Example 2
Project: incubator-spot   Author: apache   File: file_watcher.py    License: Apache License 2.0
def stop(self):
        '''
            Signals the current thread to stop and waits until it terminates. This blocks
        the calling thread until it terminates -- either normally or through an unhandled
        exception.

        :raises RuntimeError: If an attempt is made to join the current thread as that
                              would cause a deadlock. It is also an error to join() a
                              thread before it has been started and attempts to do so
                              raises the same exception.
        '''
        self._logger.info('Signal {0} thread to stop normally.'.format(str(self)))
        super(FileWatcher, self).stop()

        self._logger.info('Wait until the {0} thread terminates...'.format(str(self)))
        super(FileWatcher, self).join()

        while not self.is_empty:
            self._logger.debug('Drop "%s" from the queue.' % basename(self._queue.pop()))

        assert self.is_empty, 'Failed to clean the queue.' 
Example 3
Project: grlc   Author: CLARIAH   File: fileLoaders.py    License: MIT License
def __init__(self, spec_url):
        """Create a new URLLoader.

        Keyword arguments:
        spec_url -- URL where the specification YAML file is located."""
        headers = {'Accept' : 'text/yaml'}
        resp = requests.get(spec_url, headers=headers)
        if resp.status_code == 200:
            self.spec = yaml.load(resp.text)
            self.spec['url'] = spec_url
            self.spec['files'] = {}
            for queryUrl in self.spec['queries']:
                queryNameExt = path.basename(queryUrl)
                queryName = path.splitext(queryNameExt)[0] # Remove extension
                item = {
                    'name': queryName,
                    'download_url': queryUrl
                }
                self.spec['files'][queryNameExt] = item
            del self.spec['queries']
        else:
            raise Exception(resp.text) 
Example 4
Project: alibuild   Author: alisw   File: build.py    License: GNU General Public License v3.0
def syncToLocal(self, p, spec):
    debug("Updating remote store for package %s@%s" % (p, spec["hash"]))
    cmd = format(
                 "mkdir -p %(tarballHashDir)s\n"
                 "s3cmd sync -s -v --host s3.cern.ch --host-bucket %(b)s.s3.cern.ch s3://%(b)s/%(storePath)s/ %(tarballHashDir)s/ 2>/dev/null || true\n"
                 "for x in `s3cmd ls -s --host s3.cern.ch --host-bucket %(b)s.s3.cern.ch s3://%(b)s/%(linksPath)s/ 2>/dev/null | sed -e 's|.*s3://|s3://|'`; do"
                 "  mkdir -p '%(tarballLinkDir)s'; find '%(tarballLinkDir)s' -type l -delete;"
                 "  ln -sf `s3cmd get -s --host s3.cern.ch --host-bucket %(b)s.s3.cern.ch $x - 2>/dev/null` %(tarballLinkDir)s/`basename $x` || true\n"
                 "done",
                 b=self.remoteStore,
                 storePath=spec["storePath"],
                 linksPath=spec["linksPath"],
                 tarballHashDir=spec["tarballHashDir"],
                 tarballLinkDir=spec["tarballLinkDir"])
    err = execute(cmd)
    dieOnError(err, "Unable to update from specified store.") 
Example 5
Project: calmjs   Author: calmjs   File: test_toolchain.py    License: GNU General Public License v2.0
def test_toolchain_standard_build_dir_remapped(self):
        """
        This can either be caused by relative paths or symlinks.  Will
        result in the manually specified build_dir being remapped to its
        real location
        """

        fake = mkdtemp(self)
        real = mkdtemp(self)
        real_base = basename(real)
        spec = Spec()
        spec['build_dir'] = join(fake, pardir, real_base)

        with pretty_logging(stream=StringIO()) as s:
            with self.assertRaises(NotImplementedError):
                self.toolchain(spec)

        self.assertIn("realpath of 'build_dir' resolved to", s.getvalue())
        self.assertEqual(spec['build_dir'], real) 
Example 6
Project: calmjs   Author: calmjs   File: test_toolchain.py    License: GNU General Public License v2.0
def test_transpiler_sourcemap(self):
        # a kind of silly test but shows concept
        build_dir = mkdtemp(self)
        srcdir = mkdtemp(self)
        js_code = 'var dummy = function() {\n};\n'
        source = join(srcdir, 'source.js')
        target = 'target.js'

        with open(source, 'w') as fd:
            fd.write(js_code)

        spec = Spec(build_dir=build_dir, generate_source_map=True)
        modname = 'dummy'
        self.toolchain.transpile_modname_source_target(
            spec, modname, source, target)

        with open(join(build_dir, target + '.map')) as fd:
            result = json.load(fd)

        self.assertEqual(result['mappings'], 'AAAA;AACA;')
        self.assertEqual(len(result['sources']), 1)
        self.assertEqual(basename(result['sources'][0]), 'source.js')
        self.assertEqual(result['file'], target) 
Example 7
Project: calmjs   Author: calmjs   File: artifact.py    License: GNU General Public License v2.0
def generate_metadata_entry(self, entry_point, toolchain, spec):
        """
        After the toolchain and spec have been executed, this may be
        called to generate the artifact export entry for persistence
        into the metadata file.
        """

        export_target = spec['export_target']
        toolchain_bases = trace_toolchain(toolchain)
        toolchain_bin_path = spec.get(TOOLCHAIN_BIN_PATH)
        toolchain_bin = ([
            basename(toolchain_bin_path),  # bin_name
            get_bin_version_str(toolchain_bin_path),  # bin_version
        ] if toolchain_bin_path else [])

        return {basename(export_target): {
            'toolchain_bases': toolchain_bases,
            'toolchain_bin': toolchain_bin,
            'builder': '%s:%s' % (
                entry_point.module_name, '.'.join(entry_point.attrs)),
        }} 
Example 8
Project: mx   Author: graalvm   File: mx_ide_eclipse.py    License: GNU General Public License v2.0
def _get_eclipse_output_path(p, linkedResources=None):
    """
    Gets the Eclipse path attribute value for the output of project `p`.
    """
    outputDirRel = p.output_dir(relative=True)
    if outputDirRel.startswith('..'):
        outputDirName = basename(outputDirRel)
        if linkedResources is not None:
            linkedResources.append(_eclipse_linked_resource(outputDirName, '2', p.output_dir()))
        return outputDirName
    else:
        return outputDirRel

#: Highest Execution Environment defined by most recent Eclipse release.
#: https://wiki.eclipse.org/Execution_Environments
#: https://git.eclipse.org/c/jdt/eclipse.jdt.debug.git/plain/org.eclipse.jdt.launching/plugin.properties 
Example 9
Project: mx   Author: graalvm   File: mx_unittest.py    License: GNU General Public License v2.0
def _write_cached_testclasses(cachesDir, jar, jdk, testclasses, excludedclasses):
    """
    Writes `testclasses` to a cache file specific to `jar`.

    :param str cachesDir: directory containing files with cached test lists
    :param JDKConfig jdk: the JDK for which the cached list of classes must be written
    :param list testclasses: a list of test class names
    :param list excludedclasses: a list of excluded class names
    """
    jdkVersion = '.jdk' + str(jdk.javaCompliance)
    cache = join(cachesDir, basename(jar) + jdkVersion + '.testclasses')
    exclusions = join(cachesDir, basename(jar) + jdkVersion + '.excludedclasses')
    try:
        with open(cache, 'w') as fp:
            for classname in testclasses:
                print(classname, file=fp)
        with open(exclusions, 'w') as fp:
            if excludedclasses:
                mx.warn('Unsupported class files listed in ' + exclusions)
            for classname in excludedclasses:
                print(classname[1:], file=fp)
    except IOError as e:
        mx.warn('Error writing to ' + cache + ': ' + str(e)) 
Example 10
Project: recipe-box   Author: rtlee9   File: get_recipes.py    License: MIT License
def get_fn_recipe_links():

    letter_links = get_fn_letter_links()
    recipe_links = {}
    page_tracker = 0

    for page in letter_links:
        recipe_set = True
        page_num = 1
        lag0 = 0
        while recipe_set:
            t0 = time.time()
            recipe_set = get_all_recipes_fn(path.basename(page), page_num)
            lag1 = time.time() - t0
            recipe_links[page_tracker] = []
            recipe_links[page_tracker].extend(recipe_set)
            page_num += 1
            page_tracker += 1
            time.sleep(lag1 * .5 + lag0 * .5)
            lag0 = lag1

    return recipe_links 
Example 11
Project: imgcomp-cvpr   Author: fab-jul   File: logdir_helpers.py    License: GNU General Public License v3.0
def config_paths_from_log_dir(log_dir, base_dirs):
    log_dir = path.basename(log_dir.strip(path.sep))

    # log_dir == {now} {netconfig} {probconfig} [RESTORE@some_dir@XXXX_YYYY], get [netconfig, probconfig]
    comps = log_dir.split(' ')
    assert is_log_date(comps[0]), 'Invalid log_dir: {}'.format(log_dir)
    comps = [c for c in comps[1:] if _RESTORE_PREFIX not in c]
    assert len(comps) <= len(base_dirs), 'Expected as many config components as base dirs: {}, {}'.format(
            comps, base_dirs)

    def get_real_path(base, prepped_p):
        p_glob = prepped_p.replace('@', path.sep)
        p_glob = path.join(base, p_glob)  # e.g., ae_configs/p_glob
        glob_matches = glob.glob(p_glob)
        # We always only replace one character with *, so filter for those.
        # I.e. lr1e-5 will become lr1e*5, which will match lr1e-5 but also lr1e-4.5
        glob_matches_of_same_len = [g for g in glob_matches if len(g) == len(p_glob)]
        if len(glob_matches_of_same_len) != 1:
            raise ValueError('Cannot find config on disk: {} (matches: {})'.format(p_glob, glob_matches_of_same_len))
        return glob_matches_of_same_len[0]

    return tuple(get_real_path(base_dir, comp) for base_dir, comp in zip(base_dirs, comps)) 
Example 12
Project: imgcomp-cvpr   Author: fab-jul   File: purge_checkpoints.py    License: GNU General Public License v3.0
def purge_checkpoints(log_dir_root, target_dir, verbose):
    vprint = print if verbose else no_op.NoOp
    ckpt_dir_glob = Saver.ckpt_dir_for_log_dir(path.join(log_dir_root, '*'))
    ckpt_dir_matches = sorted(glob.glob(ckpt_dir_glob))
    for ckpt_dir in ckpt_dir_matches:
        log_dir = Saver.log_dir_from_ckpt_dir(ckpt_dir)
        all_ckpts = Saver.all_ckpts_with_iterations(ckpt_dir)
        if len(all_ckpts) <= 5:
            vprint('Skipping {}'.format(log_dir))
            continue
        target_log_dir = path.join(target_dir, path.basename(log_dir))
        target_ckpt_dir = Saver.ckpt_dir_for_log_dir(target_log_dir)
        os.makedirs(target_ckpt_dir, exist_ok=True)
        ckpts_to_keep = {all_ckpts[2], all_ckpts[len(all_ckpts) // 2], all_ckpts[-1]}
        ckpts_to_move = set(all_ckpts) - ckpts_to_keep
        vprint('Moving to {}:'.format(target_ckpt_dir))
        for _, ckpt_to_move in ckpts_to_move:
            # ckpt_to_move is /path/to/dir/ckpt-7000, add a * to match ckpt-7000.data, .meta, .index
            for ckpt_file in glob.glob(ckpt_to_move + '*'):
                vprint('- {}'.format(ckpt_file))
                shutil.move(ckpt_file, target_ckpt_dir) 
Example 13
Project: Authenticator   Author: bilelmoussaoui   File: gnupg.py    License: GNU General Public License v2.0
def __on_apply(self, *__):
        from ...models import BackupJSON
        try:
            paraphrase = self.paraphrase_widget.entry.get_text()
            if not paraphrase:
                paraphrase = " "
            output_file = path.join(GLib.get_user_cache_dir(),
                                    path.basename(NamedTemporaryFile().name))
            status = GPG.get_default().decrypt_json(self._filename, paraphrase, output_file)
            if status.ok:
                BackupJSON.import_file(output_file)
                self.destroy()
            else:
                self.__send_notification(_("There was an error during the import of the encrypted file."))

        except AttributeError:
            Logger.error("[GPG] Invalid JSON file.") 
Example 14
Project: selfmailbot   Author: f213   File: app.py    License: MIT License
def send_photo(bot, update: Update, user: User, render):
    file = update.message.photo[-1].get_file()
    photo = download(file)
    subject = 'Photo note to self'
    text = ''

    if update.message.caption is not None:
        text = update.message.caption.strip()
        if text:
            subject = 'Photo: {}'.format(get_subject(text))

    update.message.reply_text(text=render('photo_is_sent'))

    tasks.send_file.delay(
        user_id=user.pk,
        file=photo,
        filename=basename(file.file_path),
        subject=subject,
        text=text,
    ) 
Example 15
Project: gist-alfred   Author: danielecook   File: GitRelease.py    License: MIT License
def upload_asset(self, path, label="", content_type=""):
        """
        :calls: `POST https://<upload_url>/repos/:owner/:repo/releases/:release_id/assets?name=foo.zip <https://developer.github.com/v3/repos/releases/#upload-a-release-asset>`_
        :rtype: :class:`github.GitReleaseAsset.GitReleaseAsset`
        """
        assert isinstance(path, (str, unicode)), path
        assert isinstance(label, (str, unicode)), label

        post_parameters = {
            "name": basename(path),
            "label": label
        }
        headers = {}
        if len(content_type) > 0:
            headers["Content-Type"] = content_type
        resp_headers, data = self._requester.requestBlobAndCheck(
            "POST",
            self.upload_url.split("{?")[0],
            parameters=post_parameters,
            headers=headers,
            input=path
        )
        return github.GitReleaseAsset.GitReleaseAsset(self._requester, resp_headers, data, completed=True) 
Example 16
Project: MaskTrack   Author: omkar13   File: base.py    License: MIT License
def __init__(self,path,regex,load_func=None):
    super(BaseLoader, self).__init__(
        osp.join(path + '/' + regex),load_func=load_func)

    # Sequence name
    self.name = osp.basename(path)

    # Check sequence name
    if not self.name in cfg.SEQUENCES:
        raise Exception("Sequence name \'{}\' not found.".format(self.name))

    # Check sequence length
    if len(self) != cfg.SEQUENCES[self.name].num_frames:
      raise Exception("Incorrect frames number for sequence" +
          " \'{}\': found {}, expected {}.".format(
            self.name,len(self),cfg.SEQUENCES[self.name].num_frames)) 
Example 17
Project: snowflake-connector-python   Author: snowflakedb   File: dump_ocsp_response.py    License: Apache License 2.0
def main():
    """Internal Tool: OCSP response dumper."""

    def help():
        print(
            "Dump OCSP Response for the URL. ")
        print("""
Usage: {} <url> [<url> ...]
""".format(path.basename(sys.argv[0])))
        sys.exit(2)

    import sys
    if len(sys.argv) < 2:
        help()

    urls = sys.argv[1:]
    dump_ocsp_response(urls, output_filename=None) 
Example 18
Project: Traffic_sign_detection_YOLO   Author: AmeyaWagh   File: loader.py    License: MIT License
def model_name(file_path):
    file_name = basename(file_path)
    ext = str()
    if '.' in file_name: # exclude extension
        file_name = file_name.split('.')
        ext = file_name[-1]
        file_name = '.'.join(file_name[:-1])
    if ext == str() or ext == 'meta': # ckpt file
        file_name = file_name.split('-')
        num = int(file_name[-1])
        return '-'.join(file_name[:-1])
    if ext == 'weights':
        return file_name 
Example 19
Project: Traffic_sign_detection_YOLO   Author: AmeyaWagh   File: framework.py    License: MIT License
def __init__(self, meta, FLAGS):
        model = basename(meta['model'])
        model = '.'.join(model.split('.')[:-1])
        meta['name'] = model
        
        self.constructor(meta, FLAGS) 
Example 20
Project: grlc   Author: CLARIAH   File: fileLoaders.py    License: MIT License
def getTextFor(self, fileItem):
        """Returns the contents of the given file item on the specification."""
        # TODO: does this make sense? Or is it a horrible hack?
        nameExt = path.basename(fileItem['download_url'])
        return self._getText(nameExt) 
Example 21
Project: spleeter   Author: deezer   File: test_separator.py    License: MIT License
def test_separate_to_file(test_file, configuration, backend):
    """ Test file based separation. """
    with tf.Session() as sess:
        instruments = MODEL_TO_INST[configuration]
        separator = Separator(configuration, stft_backend=backend)
        name = splitext(basename(test_file))[0]
        with TemporaryDirectory() as directory:
            separator.separate_to_file(
                test_file,
                directory)
            for instrument in instruments:
                assert exists(join(
                    directory,
                    '{}/{}.wav'.format(name, instrument))) 
Example 22
Project: spleeter   Author: deezer   File: test_separator.py    License: MIT License
def test_filename_format(test_file, configuration, backend):
    """ Test custom filename format. """
    with tf.Session() as sess:
        instruments = MODEL_TO_INST[configuration]
        separator = Separator(configuration, stft_backend=backend)
        name = splitext(basename(test_file))[0]
        with TemporaryDirectory() as directory:
            separator.separate_to_file(
                test_file,
                directory,
                filename_format='export/{filename}/{instrument}.{codec}')
            for instrument in instruments:
                assert exists(join(
                    directory,
                    'export/{}/{}.wav'.format(name, instrument))) 
Example 23
Project: VSE-C   Author: ExplorerFreda   File: cli.py    License: MIT License
def escape_desc_name(filename):
    basename = osp.basename(filename)
    if basename.endswith('.py'):
        basename = basename[:-3]
    name = basename.replace('.', '_')
    return name 
Example 24
Project: NiBetaSeries   Author: HBClab   File: test_base.py    License: MIT License
def test_check_bs_len(fnames, lengths, expected_out, tmp_path):
    affine = np.eye(4)
    fpaths = []
    for fname, length in zip(fnames, lengths):
        fpath = tmp_path / fname
        nib.Nifti2Image(np.zeros((1, 1, 1, length)), affine=affine).to_filename(str(fpath))
        fpaths.append(str(fpath))

    if all(i < 3 for i in lengths):
        with pytest.raises(RuntimeError) as rterr:
            _check_bs_len(fpaths)
        assert "None of the beta series" in str(rterr.value)
    else:
        assert [op.basename(f) for f in _check_bs_len(fpaths)] == expected_out 
Example 25
Project: google_streetview   Author: rrwen   File: api.py    License: MIT License
def download_links(self, dir_path, metadata_file='metadata.json', metadata_status='status', status_ok='OK'):
    """Download Google Street View images from parameter queries if they are available.
    
    Args:
      dir_path (str):
        Path of directory to save downloads of images from :class:`api.results`.links
      metadata_file (str):
         Name of the file with extension to save the :class:`api.results`.metadata
      metadata_status (str):
        Key name of the status value from :class:`api.results`.metadata response from the metadata API request.
      status_ok (str):
        Value from the metadata API response status indicating that an image is available.
    """
    metadata = self.metadata
    if not path.isdir(dir_path):
      makedirs(dir_path)
    
    # (download) Download images if status from metadata is ok
    for i, url in enumerate(self.links):
      if metadata[i][metadata_status] == status_ok:
        file_path = path.join(dir_path, 'gsv_' + str(i) + '.jpg')
        metadata[i]['_file'] = path.basename(file_path) # add file reference
        helpers.download(url, file_path)
    
    # (metadata) Save metadata with file reference
    metadata_path = path.join(dir_path, metadata_file)
    with open(metadata_path, 'w') as out_file:
      json.dump(metadata, out_file) 
Example 26
Project: delocate   Author: matthew-brett   File: delocate_patch.py    License: BSD 2-Clause "Simplified" License
def main():
    parser = OptionParser(
        usage="%s WHEEL_FILENAME PATCH_FNAME\n\n" % sys.argv[0] + __doc__,
        version="%prog " + __version__)
    parser.add_option(
        Option("-w", "--wheel-dir",
               action="store", type='string',
               help="Directory to store patched wheel (default is to "
               "overwrite input)"))
    parser.add_option(
        Option("-v", "--verbose",
               action="store_true",
               help="Print input and output wheels"))
    (opts, args) = parser.parse_args()
    if len(args) != 2:
        parser.print_help()
        sys.exit(1)
    wheel, patch_fname = args
    if opts.wheel_dir:
        wheel_dir = expanduser(opts.wheel_dir)
        if not exists(wheel_dir):
            os.makedirs(wheel_dir)
    else:
        wheel_dir = None
    if opts.verbose:
        print('Patching: {0} with {1}'.format(wheel, patch_fname))
    if wheel_dir:
        out_wheel = pjoin(wheel_dir, basename(wheel))
    else:
        out_wheel = wheel
    patch_wheel(wheel, patch_fname, out_wheel)
    if opts.verbose:
        print("Patched wheel {0} to {1}:".format(
            wheel, out_wheel)) 
Example 27
Project: Servo   Author: fpsw   File: product.py    License: BSD 2-Clause "Simplified" License
def update_photo(self):
        """
        Updates this product image with the GSX part image
        """
        if self.component_code and not self.photo:
            try:
                part = parts.Part(partNumber=self.code)
                result = part.fetch_image()
                filename = basename(result)
                self.photo.save(filename, File(open(result)))
            except Exception as e:
                print(e) 
Example 28
Project: lirpg   Author: Hwhitetooth   File: logger.py    License: MIT License
def read_tb(path):
    """
    path : a tensorboard file OR a directory, where we will find all TB files
           of the form events.*
    """
    import pandas
    import numpy as np
    from glob import glob
    from collections import defaultdict
    import tensorflow as tf
    if osp.isdir(path):
        fnames = glob(osp.join(path, "events.*"))
    elif osp.basename(path).startswith("events."):
        fnames = [path]
    else:
        raise NotImplementedError("Expected tensorboard file or directory containing them. Got %s"%path)
    tag2pairs = defaultdict(list)
    maxstep = 0
    for fname in fnames:
        for summary in tf.train.summary_iterator(fname):
            if summary.step > 0:
                for v in summary.summary.value:
                    pair = (summary.step, v.simple_value)
                    tag2pairs[v.tag].append(pair)
                maxstep = max(summary.step, maxstep)
    data = np.empty((maxstep, len(tag2pairs)))
    data[:] = np.nan
    tags = sorted(tag2pairs.keys())
    for (colidx,tag) in enumerate(tags):
        pairs = tag2pairs[tag]
        for (step, value) in pairs:
            data[step-1, colidx] = value
    return pandas.DataFrame(data, columns=tags) 
Example 29
Project: alibuild   Author: alisw   File: clean.py    License: GNU General Public License v3.0
def decideClean(workDir, architecture, aggressiveCleanup):
  """ Decides what to delete, without actually doing it:
      - Find all the symlinks in "BUILD"
      - Find all the directories in "BUILD"
      - Schedule a directory for deletion if it does not have a symlink
  """
  symlinksBuild = [os.readlink(x) for x in glob.glob("%s/BUILD/*-latest*" % workDir)]
  # $WORK_DIR/TMP should always be cleaned up. This does not happen only
  # in the case we run out of space while unpacking.
  # $WORK_DIR/<architecture>/store can be cleaned up as well, because
  # we do not need the actual tarballs after they have been built.
  toDelete = ["%s/TMP" % workDir]
  if aggressiveCleanup:
    toDelete += ["%s/TARS/%s/store" % (workDir, architecture),
                 "%s/SOURCES" % (workDir)]
  allBuildStuff = glob.glob("%s/BUILD/*" % workDir)
  toDelete += [x for x in allBuildStuff
               if not path.islink(x) and not basename(x) in symlinksBuild]
  installGlob = "%s/%s/*/" % (workDir, architecture)
  installedPackages = set([dirname(x) for x in glob.glob(installGlob)])
  symlinksInstall = []
  for x in installedPackages:
    symlinksInstall += [path.realpath(y) for y in glob.glob(x + "/latest*")]
  toDelete += [x for x in glob.glob(installGlob+ "*")
               if not path.islink(x) and not path.realpath(x) in symlinksInstall]
  toDelete = [x for x in toDelete if path.exists(x)]
  return toDelete 
Example 30
Project: alibuild   Author: alisw   File: build.py    License: GNU General Public License v3.0
def star():
  return re.sub("build.*$", "", basename(sys.argv[0]).lower())