Python os.remove() Examples

The following are 50 code examples showing how to use os.remove(). They are extracted from open source Python projects.
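
Before the project examples, here is a minimal sketch of the typical usage pattern: create or locate a file, call os.remove() on its path, and handle the case where the file is already gone. The file name 'example.tmp' is purely illustrative and not taken from any of the projects below.

import os

path = 'example.tmp'  # illustrative file name, not from any project below

# create a throwaway file so that the removal below has something to delete
with open(path, 'w') as fh:
    fh.write('temporary data')

try:
    os.remove(path)  # delete the file; os.unlink() is an equivalent call
except FileNotFoundError:
    pass  # the file was already gone, which is fine for this sketch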

Example 1
Project: deep-summarization   Author: harpribot   File: checkpoint.py    (MIT License)
def delete_previous_checkpoints(self, num_previous=5):
        """
        Deletes all previous checkpoints that are <num_previous> before the present checkpoint.
        This is done to prevent blowing out of memory due to too many checkpoints
        
        :param num_previous:
        :return:
        """
        self.present_checkpoints = glob.glob(self.get_checkpoint_location() + '/*.ckpt')
        if len(self.present_checkpoints) > num_previous:
            present_ids = [self.__get_id(ckpt) for ckpt in self.present_checkpoints]
            present_ids.sort()
            ids_2_delete = present_ids[0:len(present_ids) - num_previous]
            for ckpt_id in ids_2_delete:
                ckpt_file_nm = self.get_checkpoint_location() + '/model_' + str(ckpt_id) + '.ckpt'
                os.remove(ckpt_file_nm) 
Example 2
Project: sensor21   Author: 21dotco   File: sensor21-server.py    (license)
def run(daemon):
        if daemon:
            pid_file = './sensor21.pid'
            if os.path.isfile(pid_file):
                pid = int(open(pid_file).read())
                os.remove(pid_file)
                try:
                    p = psutil.Process(pid)
                    p.terminate()
                except:
                    pass
            try:
                p = subprocess.Popen(['python3', 'sensor21-server.py'])
                open(pid_file, 'w').write(str(p.pid))
            except subprocess.CalledProcessError:
                raise ValueError("error starting sensor21-server.py daemon")
        else:
            print("Server running...")
            app.run(host='::', port=5002) 
Example 3
Project: python-   Author: secondtonone1   File: download.py    (license)
def _copy_file(filename, location, link):
    copy = True
    download_location = os.path.join(location, link.filename)
    if os.path.exists(download_location):
        response = ask_path_exists(
            'The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)bort' %
            display_path(download_location), ('i', 'w', 'b', 'a'))
        if response == 'i':
            copy = False
        elif response == 'w':
            logger.warning('Deleting %s', display_path(download_location))
            os.remove(download_location)
        elif response == 'b':
            dest_file = backup_dir(download_location)
            logger.warning(
                'Backing up %s to %s',
                display_path(download_location),
                display_path(dest_file),
            )
            shutil.move(download_location, dest_file)
        elif response == 'a':
            sys.exit(-1)
    if copy:
        shutil.copy(filename, download_location)
        logger.info('Saved %s', display_path(download_location)) 
Example 4
Project: geo-pyprint   Author: ioda-net   File: __init__.py    (MIT License)
def mapprint(request):
    payload = request.json_body

    output_file_name = MapPrint(payload).print_pdf()

    response = FileResponse(
        output_file_name,
        request=request
    )
    response.headers['Content-Disposition'] = ('attachment; filename="{}"'
                                               .format(output_file_name + '.pdf'))
    response.headers['Content-Type'] = 'application/pdf'

    if os.path.exists(output_file_name):
        os.remove(output_file_name)

    return response 
Example 5
Project: pycos   Author: pgiri   File: dispycos_client9_server.py    (license)
def setup_server(data_file, task=None):  # executed on remote server
    # variables declared as 'global' will be available in tasks for read/write
    # to all computations on a server.
    global hashlib, data, file_name
    import os, hashlib
    file_name = data_file
    print('%s processing %s' % (task.location, data_file))
    # note that files transferred to server are in the directory where
    # computations are executed (cf 'node_setup' in dispycos_client9_node.py)
    with open(data_file, 'rb') as fd:
        data = fd.read()
    os.remove(data_file)  # data_file is not needed anymore
    # generator functions must have at least one 'yield'
    yield 0 # indicate successful initialization with exit value 0

# 'compute' is executed at remote server process repeatedly to compute checksum
# of data in memory, initialized by 'setup_server' 
Example 6
Project: pycos   Author: pgiri   File: dispycos_client9_node.py    (license)
def node_setup(data_file):
    # 'node_setup' is executed on a node with the arguments returned by
    # 'node_available'. This task should return 0 to indicate successful
    # initialization.

    # variables declared as 'global' will be available (as read-only) in tasks.
    global os, hashlib, data, file_name
    import os, hashlib
    # note that files transferred to node are in parent directory of cwd where
    # each computation is run (in case such files need to be accessed in
    # computation).
    print('data_file: "%s"' % data_file)
    with open(data_file, 'rb') as fd:
        data = fd.read()
    os.remove(data_file)  # data_file is not needed anymore
    file_name = data_file
    yield 0  # task must have at least one 'yield' and 0 indicates success

# 'compute' is executed at remote server process repeatedly to compute checksum
# of data in memory, initialized by 'node_setup' 
Example 7
Project: pycos   Author: pgiri   File: dispycos_client9_node.py    (license)
def node_setup(data_file):
    # 'node_setup' is executed on a node with the arguments returned by
    # 'node_available'. This task should return 0 to indicate successful
    # initialization.

    # variables declared as 'global' will be available (as read-only) in tasks.
    global os, hashlib, data, file_name
    import os, hashlib
    # note that files transferred to node are in parent directory of cwd where
    # each computation is run (in case such files need to be accessed in
    # computation).
    print('data_file: "%s"' % data_file)
    with open(data_file, 'rb') as fd:
        data = fd.read()
    os.remove(data_file)  # data_file is not needed anymore
    file_name = data_file
    yield 0  # task must have at least one 'yield' and 0 indicates success

# 'compute' is executed at remote server process repeatedly to compute checksum
# of data in memory, initialized by 'node_setup' 
Example 8
Project: pycos   Author: pgiri   File: dispycos_client9_server.py    (license)
def setup_server(data_file, task=None):  # executed on remote server
    # variables declared as 'global' will be available in tasks for read/write
    # to all computations on a server.
    global hashlib, data, file_name
    import os, hashlib
    file_name = data_file
    print('%s processing %s' % (task.location, data_file))
    # note that files transferred to server are in the directory where
    # computations are executed (cf 'node_setup' in dispycos_client9_node.py)
    with open(data_file, 'rb') as fd:
        data = fd.read()
    os.remove(data_file)  # data_file is not needed anymore
    # generator functions must have at least one 'yield'
    yield 0 # indicate successful initialization with exit value 0

# 'compute' is executed at remote server process repeatedly to compute checksum
# of data in memory, initialized by 'setup_server' 
Example 9
Project: charm-plumgrid-gateway   Author: openstack   File: ceph.py    (Apache License 2.0)
def remove_cache_tier(self, cache_pool):
        """
        Removes a cache tier from Ceph.  Flushes all dirty objects from writeback pools and waits for that to complete.
        :param cache_pool: six.string_types.  The cache tier pool name to remove.
        :return: None
        """
        # read-only is easy, writeback is much harder
        mode = get_cache_mode(self.service, cache_pool)
        version = ceph_version()
        if mode == 'readonly':
            check_call(['ceph', '--id', self.service, 'osd', 'tier', 'cache-mode', cache_pool, 'none'])
            check_call(['ceph', '--id', self.service, 'osd', 'tier', 'remove', self.name, cache_pool])

        elif mode == 'writeback':
            pool_forward_cmd = ['ceph', '--id', self.service, 'osd', 'tier',
                                'cache-mode', cache_pool, 'forward']
            if version >= '10.1':
                # Jewel added a mandatory flag
                pool_forward_cmd.append('--yes-i-really-mean-it')

            check_call(pool_forward_cmd)
            # Flush the cache and wait for it to return
            check_call(['rados', '--id', self.service, '-p', cache_pool, 'cache-flush-evict-all'])
            check_call(['ceph', '--id', self.service, 'osd', 'tier', 'remove-overlay', self.name])
            check_call(['ceph', '--id', self.service, 'osd', 'tier', 'remove', self.name, cache_pool]) 
Example 10
Project: alfred-mpd   Author: deanishe   File: workflow.py    (license)
def unregister(self, name):
        """Remove registered serializer with ``name``.

        Raises a :class:`ValueError` if there is no such registered
        serializer.

        :param name: Name of serializer to remove
        :type name: ``unicode`` or ``str``
        :returns: serializer object

        """
        if name not in self._serializers:
            raise ValueError('No such serializer registered : {0}'.format(
                             name))

        serializer = self._serializers[name]
        del self._serializers[name]

        return serializer 
Example 11
Project: alfred-mpd   Author: deanishe   File: workflow.py    (license)
def atomic_writer(file_path, mode):
    """Atomic file writer.

    :param file_path: path of file to write to.
    :type file_path: ``unicode``
    :param mode: sames as for `func:open`
    :type mode: string

    .. versionadded:: 1.12

    Context manager that ensures the file is only written if the write
    succeeds. The data is first written to a temporary file.

    """
    temp_suffix = '.aw.temp'
    temp_file_path = file_path + temp_suffix
    with open(temp_file_path, mode) as file_obj:
        try:
            yield file_obj
            os.rename(temp_file_path, file_path)
        finally:
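            # if the rename above succeeded, the temp file is already gone and
            # remove() raises OSError, which is deliberately ignored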
            try:
                os.remove(temp_file_path)
            except (OSError, IOError):
                pass 
Example 12
Project: txt2evernote   Author: Xunius   File: tools.py    (GNU General Public License v3.0)
def saveFile(abpath_out,text,overwrite=True,verbose=True):

    abpath_out=expandUser(abpath_out)
    if os.path.isfile(abpath_out):
        if overwrite:
            os.remove(abpath_out)
        else:
            abpath_out=autoRename(abpath_out)

    if verbose:
        print('\n# <saveFile>: Saving result to:')
        print(abpath_out)

    with open(abpath_out, mode='a') as fout:
        fout.write(enu(text))

    return

#------------------Expand user home "~" in file names------------------ 
Example 13
Project: txt2evernote   Author: Xunius   File: gnsync.py    (GNU General Public License v3.0)
def reset_logpath(logpath):
    """
    Reset logpath to path from command line
    """
    global logger

    if not logpath:
        return

    # remove temporary log file if it's empty
    if os.path.isfile(def_logpath):
        if os.path.getsize(def_logpath) == 0:
            os.remove(def_logpath)

    # save previous handlers
    handlers = logger.handlers

    # remove old handlers
    for handler in handlers:
        logger.removeHandler(handler)

    # try to set new file handler
    handler = logging.FileHandler(logpath)
    handler.setFormatter(formatter)
    logger.addHandler(handler) 
Example 14
Project: Zoom2Youtube   Author: Welltory   File: youtube.py    (MIT License)
def upload_from_dir(self, video_dir: str):
        assert os.path.isdir(video_dir), "Not found directory"
        files = self._get_files_from_dir(video_dir, 'mp4')
        for fname in files:
            fpath = os.path.join(video_dir, fname)
            if not os.path.exists(fpath):
                continue
            title = os.path.splitext(os.path.basename(fname))[0]
            options = dict(
                file=fpath,
                title=title,
                privacyStatus='unlisted',
            )
            video_id = self.upload_video(options)
            if not video_id:
                continue

            video_url = 'https://www.youtube.com/watch?v={}'.format(video_id)
            print('File uploaded: {}'.format(video_url))
            message = '{} - {}'.format(title, video_url)
            self.notify(message)
            os.remove(fpath) 
Example 15
Project: python-   Author: secondtonone1   File: util.py    (license)
def get_resources_dests(resources_root, rules):
    """Find destinations for resources files"""

    def get_rel_path(base, path):
        # normalizes and returns a lstripped-/-separated path
        base = base.replace(os.path.sep, '/')
        path = path.replace(os.path.sep, '/')
        assert path.startswith(base)
        return path[len(base):].lstrip('/')


    destinations = {}
    for base, suffix, dest in rules:
        prefix = os.path.join(resources_root, base)
        for abs_base in iglob(prefix):
            abs_glob = os.path.join(abs_base, suffix)
            for abs_path in iglob(abs_glob):
                resource_file = get_rel_path(resources_root, abs_path)
                if dest is None:  # remove the entry if it was here
                    destinations.pop(resource_file, None)
                else:
                    rel_path = get_rel_path(abs_base, abs_path)
                    rel_dest = dest.replace(os.path.sep, '/').rstrip('/')
                    destinations[resource_file] = rel_dest + '/' + rel_path
    return destinations 
Example 16
Project: python-   Author: secondtonone1   File: util.py    (license)
def convert_path(pathname):
    """Return 'pathname' as a name that will work on the native filesystem.

    The path is split on '/' and put back together again using the current
    directory separator.  Needed because filenames in the setup script are
    always supplied in Unix style, and have to be converted to the local
    convention before we can actually use them in the filesystem.  Raises
    ValueError on non-Unix-ish systems if 'pathname' either starts or
    ends with a slash.
    """
    if os.sep == '/':
        return pathname
    if not pathname:
        return pathname
    if pathname[0] == '/':
        raise ValueError("path '%s' cannot be absolute" % pathname)
    if pathname[-1] == '/':
        raise ValueError("path '%s' cannot end with '/'" % pathname)

    paths = pathname.split('/')
    while os.curdir in paths:
        paths.remove(os.curdir)
    if not paths:
        return os.curdir
    return os.path.join(*paths) 
Example 17
Project: python-   Author: secondtonone1   File: util.py    (license)
def rollback(self):
        if not self.dry_run:
            for f in list(self.files_written):
                if os.path.exists(f):
                    os.remove(f)
            # dirs should all be empty now, except perhaps for
            # __pycache__ subdirs
            # reverse so that subdirs appear before their parents
            dirs = sorted(self.dirs_created, reverse=True)
            for d in dirs:
                flist = os.listdir(d)
                if flist:
                    assert flist == ['__pycache__']
                    sd = os.path.join(d, flist[0])
                    os.rmdir(sd)
                os.rmdir(d)     # should fail if non-empty
        self._init_record() 
Example 18
Project: python-   Author: secondtonone1   File: util.py    (license)
def get_extras(requested, available):
    result = set()
    requested = set(requested or [])
    available = set(available or [])
    if '*' in requested:
        requested.remove('*')
        result |= available
    for r in requested:
        if r == '-':
            result.add(r)
        elif r.startswith('-'):
            unwanted = r[1:]
            if unwanted not in available:
                logger.warning('undeclared extra: %s' % unwanted)
            if unwanted in result:
                result.remove(unwanted)
        else:
            if r not in available:
                logger.warning('undeclared extra: %s' % r)
            result.add(r)
    return result
#
# Extended metadata functionality
# 
Example 19
Project: python-   Author: secondtonone1   File: util.py    (license)
def get_steps(self, final):
        if not self.is_step(final):
            raise ValueError('Unknown: %r' % final)
        result = []
        todo = []
        seen = set()
        todo.append(final)
        while todo:
            step = todo.pop(0)
            if step in seen:
                # if a step was already seen,
                # move it to the end (so it will appear earlier
                # when reversed on return) ... but not for the
                # final step, as that would be confusing for
                # users
                if step != final:
                    result.remove(step)
                    result.append(step)
            else:
                seen.add(step)
                result.append(step)
                preds = self._preds.get(step, ())
                todo.extend(preds)
        return reversed(result) 
Example 20
Project: facerecognition   Author: guoxiaolu   File: elasticsearch_driver.py    (license)
def delete_duplicates(self, path):
        """Delete all but one entries in elasticsearch whose `path` value is equivalent to that of path.
           need to modify!!!
        Args:
            path (string): path value to compare to those in the elastic search
        """
        result = self.es.search(body={'query':
                                 {'match':
                                      {'path': path}
                                  }
                             },
                       index=self.index)['hits']['hits']

        matching_paths = []
        matching_thumbnail = []
        for item in result:
            if item['_source']['path'] == path:
                matching_paths.append(item['_id'])
                matching_thumbnail.append(item['_source']['thumbnail'])

        if len(matching_paths) > 0:
            for i, id_tag in enumerate(matching_paths[1:]):
                self.es.delete(index=self.index, doc_type=self.doc_type, id=id_tag)
                if os.path.isfile(matching_thumbnail[i]):
                    os.remove(matching_thumbnail[i]) 
Example 21
Project: lammps-data-file   Author: kbsezginel   File: crystal.py    (MIT License)
def export(self, export_dir, file_format='xyz'):
        """
        Export MOF atom coordinates and names in .xyz format.

        Example usage:
         >>> mof.export(export_dir, file_format='xyz')
        """
        if file_format == 'xyz':
            xyz_path = os.path.join(export_dir, self.name + '.xyz')
            if os.path.exists(xyz_path):
                os.remove(xyz_path)

            with open(xyz_path, 'w') as xyz_file:
                xyz_file.write(str(len(self.atom_coors)) + '\n')
                xyz_file.write(self.name + '\n')
                for atom, coor in zip(self.atom_names, self.atom_coors):
                    xyz_file.write(atom + ' ' + str(coor[0]) + ' ' + str(coor[1]) + ' ' + str(coor[2]) + '\n')

        else:
            file_path = os.path.join(export_dir, self.name + '.' + file_format)
            ase.write(file_path, self.ase_atoms, file_format=file_format) 
Example 22
Project: rca-evaluation   Author: sieve-microservices   File: metadata.py    (license)
def _atomic_write(filename):
    path = os.path.dirname(filename)
    try:
        file = tempfile.NamedTemporaryFile(delete=False, dir=path, mode="w+")
        yield file
        file.flush()
        os.fsync(file.fileno())
        os.rename(file.name, filename)
    finally:
        try:
            os.remove(file.name)
        except OSError as e:
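            # errno 2 (ENOENT): the temp file was already renamed to its final
            # name, so there is nothing left to remove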
            if e.errno == 2:
                pass
            else:
                raise e 
Example 23
Project: ironic-staging-drivers   Author: openstack   File: power.py    (Apache License 2.0)
def _set_boot_device(conn, domain, device):
    """Set the boot device.

    :param conn: active libvirt connection.
    :param domain: libvirt domain object.
    :param device: boot device to set on the domain.
    :raises: LibvirtError if updating the domain XML fails.
    """

    parsed = ET.fromstring(domain.XMLDesc())
    os = parsed.find('os')
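    # NOTE: 'os' here is the <os> element of the domain XML and shadows the os
    # module, so os.remove() below removes a child XML element, not a file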
    boot_list = os.findall('boot')

    # Clear boot list
    for boot_el in boot_list:
        os.remove(boot_el)

    boot_el = ET.SubElement(os, 'boot')
    boot_el.set('dev', device)

    try:
        conn.defineXML(ET.tostring(parsed))
    except libvirt.libvirtError as e:
        raise isd_exc.LibvirtError(err=e) 
Example 24
Project: ardy   Author: avara1986   File: test_deploy_integration.py    (Apache License 2.0)
def test_run_with_alias(self, create_artefact_mock, copytree_mock, pip_install_mock):
        zip_file = MockZipFile.create_zip("test")
        create_artefact_mock.return_value = zip_file

        self.deploy = Deploy(path=os.path.dirname(os.path.abspath(__file__)), filename="config_with_alias.json")

        # TODO: investigate why moto raises errors
        try:
            # Create lambdas
            self.deploy.run("myexamplelambdaproject")

            self.assertTrue(pip_install_mock.called)
            self.assertTrue(copytree_mock.called)
            self.assertTrue(create_artefact_mock.called)

            # Update lambdas
            self.deploy.run("myexamplelambdaproject")

        except ConnectionError as e:
            print(e)

        os.remove(zip_file) 
Example 25
Project: ardy   Author: avara1986   File: test_deploy_integration.py    (Apache License 2.0)
def test_run_with_trigger_s3(self, create_artefact_mock, copytree_mock, pip_install_mock):
        zip_file = MockZipFile.create_zip("test")
        create_artefact_mock.return_value = zip_file

        self.deploy = Deploy(path=os.path.dirname(os.path.abspath(__file__)), filename="config_with_triggers.json",
                             lambdas_to_deploy=["LambdaExample_S3_7", ])
        # TODO: investigate why moto raises errors
        try:
            # Create lambdas
            self.deploy.run("myexamplelambdaproject")

            self.assertTrue(pip_install_mock.called)
            self.assertTrue(copytree_mock.called)
            self.assertTrue(create_artefact_mock.called)

            # Update lambdas
            self.deploy.run("myexamplelambdaproject")

        except ConnectionError as e:
            print(e)

        os.remove(zip_file) 
Example 26
Project: my-first-blog   Author: AnkurBegining   File: util.py    (license)
def get_resources_dests(resources_root, rules):
    """Find destinations for resources files"""

    def get_rel_path(base, path):
        # normalizes and returns a lstripped-/-separated path
        base = base.replace(os.path.sep, '/')
        path = path.replace(os.path.sep, '/')
        assert path.startswith(base)
        return path[len(base):].lstrip('/')


    destinations = {}
    for base, suffix, dest in rules:
        prefix = os.path.join(resources_root, base)
        for abs_base in iglob(prefix):
            abs_glob = os.path.join(abs_base, suffix)
            for abs_path in iglob(abs_glob):
                resource_file = get_rel_path(resources_root, abs_path)
                if dest is None:  # remove the entry if it was here
                    destinations.pop(resource_file, None)
                else:
                    rel_path = get_rel_path(abs_base, abs_path)
                    rel_dest = dest.replace(os.path.sep, '/').rstrip('/')
                    destinations[resource_file] = rel_dest + '/' + rel_path
    return destinations 
Example 27
Project: my-first-blog   Author: AnkurBegining   File: util.py    (license)
def convert_path(pathname):
    """Return 'pathname' as a name that will work on the native filesystem.

    The path is split on '/' and put back together again using the current
    directory separator.  Needed because filenames in the setup script are
    always supplied in Unix style, and have to be converted to the local
    convention before we can actually use them in the filesystem.  Raises
    ValueError on non-Unix-ish systems if 'pathname' either starts or
    ends with a slash.
    """
    if os.sep == '/':
        return pathname
    if not pathname:
        return pathname
    if pathname[0] == '/':
        raise ValueError("path '%s' cannot be absolute" % pathname)
    if pathname[-1] == '/':
        raise ValueError("path '%s' cannot end with '/'" % pathname)

    paths = pathname.split('/')
    while os.curdir in paths:
        paths.remove(os.curdir)
    if not paths:
        return os.curdir
    return os.path.join(*paths) 
Example 28
Project: my-first-blog   Author: AnkurBegining   File: util.py    (license)
def rollback(self):
        if not self.dry_run:
            for f in list(self.files_written):
                if os.path.exists(f):
                    os.remove(f)
            # dirs should all be empty now, except perhaps for
            # __pycache__ subdirs
            # reverse so that subdirs appear before their parents
            dirs = sorted(self.dirs_created, reverse=True)
            for d in dirs:
                flist = os.listdir(d)
                if flist:
                    assert flist == ['__pycache__']
                    sd = os.path.join(d, flist[0])
                    os.rmdir(sd)
                os.rmdir(d)     # should fail if non-empty
        self._init_record() 
Example 29
Project: my-first-blog   Author: AnkurBegining   File: util.py    (license)
def get_extras(requested, available):
    result = set()
    requested = set(requested or [])
    available = set(available or [])
    if '*' in requested:
        requested.remove('*')
        result |= available
    for r in requested:
        if r == '-':
            result.add(r)
        elif r.startswith('-'):
            unwanted = r[1:]
            if unwanted not in available:
                logger.warning('undeclared extra: %s' % unwanted)
            if unwanted in result:
                result.remove(unwanted)
        else:
            if r not in available:
                logger.warning('undeclared extra: %s' % r)
            result.add(r)
    return result
#
# Extended metadata functionality
# 
Example 30
Project: my-first-blog   Author: AnkurBegining   File: util.py    (license)
def get_steps(self, final):
        if not self.is_step(final):
            raise ValueError('Unknown: %r' % final)
        result = []
        todo = []
        seen = set()
        todo.append(final)
        while todo:
            step = todo.pop(0)
            if step in seen:
                # if a step was already seen,
                # move it to the end (so it will appear earlier
                # when reversed on return) ... but not for the
                # final step, as that would be confusing for
                # users
                if step != final:
                    result.remove(step)
                    result.append(step)
            else:
                seen.add(step)
                result.append(step)
                preds = self._preds.get(step, ())
                todo.extend(preds)
        return reversed(result) 
Example 31
Project: my-first-blog   Author: AnkurBegining   File: download.py    (license)
def _copy_file(filename, location, link):
    copy = True
    download_location = os.path.join(location, link.filename)
    if os.path.exists(download_location):
        response = ask_path_exists(
            'The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)bort' %
            display_path(download_location), ('i', 'w', 'b', 'a'))
        if response == 'i':
            copy = False
        elif response == 'w':
            logger.warning('Deleting %s', display_path(download_location))
            os.remove(download_location)
        elif response == 'b':
            dest_file = backup_dir(download_location)
            logger.warning(
                'Backing up %s to %s',
                display_path(download_location),
                display_path(dest_file),
            )
            shutil.move(download_location, dest_file)
        elif response == 'a':
            sys.exit(-1)
    if copy:
        shutil.copy(filename, download_location)
        logger.info('Saved %s', display_path(download_location)) 
Example 32
Project: my-first-blog   Author: AnkurBegining   File: bdist_egg.py    (license)
def zap_pyfiles(self):
        log.info("Removing .py files from temporary directory")
        for base, dirs, files in walk_egg(self.bdist_dir):
            for name in files:
                path = os.path.join(base, name)

                if name.endswith('.py'):
                    log.debug("Deleting %s", path)
                    os.unlink(path)

                if base.endswith('__pycache__'):
                    path_old = path

                    pattern = r'(?P<name>.+)\.(?P<magic>[^.]+)\.pyc'
                    m = re.match(pattern, name)
                    path_new = os.path.join(base, os.pardir, m.group('name') + '.pyc')
                    log.info("Renaming file from [%s] to [%s]" % (path_old, path_new))
                    try:
                        os.remove(path_new)
                    except OSError:
                        pass
                    os.rename(path_old, path_new) 
Example 33
Project: picoCTF   Author: picoCTF   File: status.py    (MIT License)
def clean(args, config):
    """ Main entrypoint for clean """

    lock_file = join(HACKSPORTS_ROOT, "deploy.lock")

    # remove staging directories
    if os.path.isdir(STAGING_ROOT):
        logger.info("Removing the staging directories")
        shutil.rmtree(STAGING_ROOT)

    # remove lock file
    if os.path.isfile(lock_file):
        logger.info("Removing the stale lock file")
        os.remove(lock_file)

    #TODO: potentially perform more cleaning 
Example 34
Project: NeoVintageous   Author: NeoVintageous   File: windows.py    (license)
def filter_region(view, txt, command):
    try:
        contents = tempfile.NamedTemporaryFile(suffix='.txt', delete=False)
        contents.write(txt.encode('utf-8'))
        contents.close()

        script = tempfile.NamedTemporaryFile(suffix='.bat', delete=False)
        script.write(('@echo off\ntype %s | %s' % (contents.name, command)).encode('utf-8'))
        script.close()

        p = subprocess.Popen([script.name],
                             stdout=PIPE,
                             stderr=PIPE,
                             startupinfo=get_startup_info())

        out, err = p.communicate()
        return (out or err).decode(get_oem_cp()).replace('\r\n', '\n')[:-1].strip()
    finally:
        os.remove(script.name)
        os.remove(contents.name) 
Example 35
Project: core-framework   Author: RedhawkSDR   File: test_15_LoggingConfig.py    (license)
def test_comp_macro_directories_config_python(self):
        file_loc = os.getcwd()
        self.comp = sb.launch(self.cname, impl="python", execparams={'LOGGING_CONFIG_URI':'file://'+os.getcwd()+'/logconfig.cfg'} )
        fp = None
        try:
            fp = open('foo/bar/test.log','r')
        except:
            pass
        try:
            os.remove('foo/bar/test.log')
        except:
            pass
        try:
            os.rmdir('foo/bar')
        except:
            pass
        try:
            os.rmdir('foo')
        except:
            pass
        self.assertNotEquals(fp, None) 
Example 36
Project: core-framework   Author: RedhawkSDR   File: test_15_LoggingConfig.py    (license)
def test_comp_macro_directories_config_cpp(self):
        file_loc = os.getcwd()
        self.comp = sb.launch(self.cname, impl="cpp", execparams={'LOGGING_CONFIG_URI':'file://'+os.getcwd()+'/logconfig.cfg'} )
        fp = None
        try:
            fp = open('foo/bar/test.log','r')
        except:
            pass
        try:
            os.remove('foo/bar/test.log')
        except:
            pass
        try:
            os.rmdir('foo/bar')
        except:
            pass
        try:
            os.rmdir('foo')
        except:
            pass
        self.assertNotEquals(fp, None) 
Example 37
Project: core-framework   Author: RedhawkSDR   File: test_15_LoggingConfig.py    (license)
def test_comp_macro_directories_config_java(self):
        file_loc = os.getcwd()
        self.comp = sb.launch(self.cname, impl="java", execparams={'LOGGING_CONFIG_URI':'file://'+os.getcwd()+'/logconfig.cfg'} )
        fp = None
        try:
            fp = open('foo/bar/test.log','r')
        except:
            pass
        try:
            os.remove('foo/bar/test.log')
        except:
            pass
        try:
            os.rmdir('foo/bar')
        except:
            pass
        try:
            os.rmdir('foo')
        except:
            pass
        self.assertNotEquals(fp, None) 
Example 38
Project: core-framework   Author: RedhawkSDR   File: test_07_PythonParsers.py    (license)
def test_SADParser(self):
        sad = parsers.SADParser.parse("sdr/dom/waveforms/CommandWrapperWithPropertyOverride/CommandWrapper.sad.xml")
        self.assertEqual(sad.get_id(), "DCE:d206ab51-6342-4976-bac3-55e6902f3489")
        self.assertEqual(sad.get_name(), "CommandWrapperWithPropertyOverride")
        self.assertEqual(len(sad.componentfiles.get_componentfile()), 1)
        self.assertEqual(len(sad.partitioning.get_componentplacement()), 1)
        self.assertEqual(sad.partitioning.get_componentplacement()[0].componentfileref.refid, "CommandWrapper_592b8bd6-b011-4468-9417-705af45e907b")
        self.assertEqual(sad.partitioning.get_componentplacement()[0].get_componentinstantiation()[0].id_, "DCE:8c129782-a6a4-4095-8212-757f01de0c09")
        self.assertEqual(sad.partitioning.get_componentplacement()[0].get_componentinstantiation()[0].get_usagename(), "CommandWrapper1")
        self.assertEqual(sad.partitioning.get_componentplacement()[0].get_componentinstantiation()[0].componentproperties.get_simpleref()[0].refid, "DCE:a4e7b230-1d17-4a86-aeff-ddc6ea3df26e")
        self.assertEqual(sad.partitioning.get_componentplacement()[0].get_componentinstantiation()[0].componentproperties.get_simpleref()[0].value, "/bin/date")

        # Verify that we can write the output and still be DTD valid
        tmpfile = tempfile.mktemp()
        try:
            tmp = open(tmpfile, "w")
            sad.export(tmp, 0)
            tmp.close()
            status = self._xmllint(tmpfile, "SAD")
            self.assertEqual(status, 0, "Python parser did not emit DTD compliant XML")
        finally:
            try:
                os.remove(tmpfile)
            except OSError:
                pass 
Example 39
Project: ln2sql   Author: FerreroJeremy   File: ln2sql.py    (GNU General Public License v3.0)
def remove_json(self, filename="output.json"):
        if os.path.exists(filename):
            os.remove(filename) 
Example 40
Project: pycos   Author: pgiri   File: dispycos_client4.py    (license)
def rtask_proc(task=None):
    import os
    # receive object from client_proc task
    cobj = yield task.receive()
    if not cobj:
        raise StopIteration
    # Input file is already copied at where this rtask is running (by client).
    # For given input file, create an output file with each line in the output
    # file computed as length of corresponding line in input file
    cobj.result_file = 'result-%s' % cobj.data_file
    with open(cobj.data_file, 'r') as data_fd:
        with open(cobj.result_file, 'w') as result_fd:
            for lineno, line in enumerate(data_fd, start=1):
                result_fd.write('%d: %d\n' % (lineno, len(line)-1))
    # 'sleep' to simulate computing
    yield task.sleep(cobj.n)
    # transfer the result file to client
    status = yield pycos.Pycos().send_file(cobj.client.location, cobj.result_file,
                                                 overwrite=True, timeout=30)
    if status:
        print('Could not send %s to %s' % (cobj.result_file, cobj.client.location))
        cobj.result_file = None
    cobj.client.send(cobj)
    os.remove(cobj.data_file)
    os.remove(cobj.result_file)


# this generator function is used to create local task (at the client) to
# communicate with a remote task 
Example 41
Project: pycos   Author: pgiri   File: netpycos.py    (license)
def _udp_proc(self, location, addrinfo, task=None):
        """
        Internal use only.
        """
        task.set_daemon()
        sock = addrinfo.udp_sock
        while 1:
            msg, addr = yield sock.recvfrom(1024)
            if not msg.startswith('ping:'):
                logger.warning('ignoring UDP message from %s:%s', addr[0], addr[1])
                continue
            try:
                ping_info = deserialize(msg[len('ping:'):])
            except:
                continue
            peer_location = ping_info.get('location', None)
            if not isinstance(peer_location, Location) or peer_location in self._locations:
                continue
            if ping_info['version'] != __version__:
                logger.warning('Peer %s version %s is not %s',
                               peer_location, ping_info['version'], __version__)
                continue
            if self._ignore_peers:
                continue
            if self._secret is None:
                auth_code = None
            else:
                auth_code = ping_info.get('signature', '') + self._secret
                auth_code = hashlib.sha1(auth_code.encode()).hexdigest()
            _Peer._lock.acquire()
            peer = _Peer.peers.get((peer_location.addr, peer_location.port), None)
            _Peer._lock.release()
            if peer and peer.auth != auth_code:
                _Peer.remove(peer_location)
                peer = None

            if not peer:
                SysTask(self._acquaint_, peer_location, ping_info['signature'], addrinfo) 
Example 42
Project: pycos   Author: pgiri   File: netpycos.py    (license)
def remove(location):
        _Peer._lock.acquire()
        peer = _Peer.peers.pop((location.addr, location.port), None)
        _Peer._lock.release()
        if peer:
            logger.debug('%s: peer %s terminated', peer.addrinfo.location, peer.location)
            peer.stream = False
            RTI._peer_closed_(peer.location)
            _Peer._sign_locations.pop(peer.signature, None)
            if peer.req_task:
                peer.req_task.terminate()
            if _Peer.status_task:
                _Peer.status_task.send(PeerStatus(peer.location, peer.name, PeerStatus.Offline)) 
Example 43
Project: pycos   Author: pgiri   File: dispycos_client4.py    (license)
def rtask_proc(task=None):
    import os
    # receive object from client_proc task
    cobj = yield task.receive()
    if not cobj:
        raise StopIteration
    # Input file is already copied at where this rtask is running (by client).
    # For given input file, create an output file with each line in the output
    # file computed as length of corresponding line in input file
    cobj.result_file = 'result-%s' % cobj.data_file
    with open(cobj.data_file, 'r') as data_fd:
        with open(cobj.result_file, 'w') as result_fd:
            for lineno, line in enumerate(data_fd, start=1):
                result_fd.write('%d: %d\n' % (lineno, len(line)-1))
    # 'sleep' to simulate computing
    yield task.sleep(cobj.n)
    # transfer the result file to client
    status = yield pycos.Pycos().send_file(cobj.client.location, cobj.result_file,
                                                 overwrite=True, timeout=30)
    if status:
        print('Could not send %s to %s' % (cobj.result_file, cobj.client.location))
        cobj.result_file = None
    cobj.client.send(cobj)
    os.remove(cobj.data_file)
    os.remove(cobj.result_file)


# this generator function is used to create local task (at the client) to
# communicate with a remote task 
Example 44
Project: pycos   Author: pgiri   File: netpycos.py    (license)
def _udp_proc(self, location, addrinfo, task=None):
        """
        Internal use only.
        """
        task.set_daemon()
        sock = addrinfo.udp_sock
        while 1:
            msg, addr = yield sock.recvfrom(1024)
            if not msg.startswith(b'ping:'):
                logger.warning('ignoring UDP message from %s:%s', addr[0], addr[1])
                continue
            try:
                ping_info = deserialize(msg[len(b'ping:'):])
            except:
                continue
            peer_location = ping_info.get('location', None)
            if not isinstance(peer_location, Location) or peer_location in self._locations:
                continue
            if ping_info['version'] != __version__:
                logger.warning('Peer %s version %s is not %s',
                               peer_location, ping_info['version'], __version__)
                continue
            if self._ignore_peers:
                continue
            if self._secret is None:
                auth_code = None
            else:
                auth_code = ping_info.get('signature', '') + self._secret
                auth_code = hashlib.sha1(auth_code.encode()).hexdigest()
            _Peer._lock.acquire()
            peer = _Peer.peers.get((peer_location.addr, peer_location.port), None)
            _Peer._lock.release()
            if peer and peer.auth != auth_code:
                _Peer.remove(peer_location)
                peer = None

            if not peer:
                SysTask(self._acquaint_, peer_location, ping_info['signature'], addrinfo) 
Example 45
Project: pycos   Author: pgiri   File: netpycos.py    (license)
def remove(location):
        _Peer._lock.acquire()
        peer = _Peer.peers.pop((location.addr, location.port), None)
        _Peer._lock.release()
        if peer:
            logger.debug('%s: peer %s terminated', peer.addrinfo.location, peer.location)
            RTI._peer_closed_(peer.location)
            peer.stream = False
            _Peer._sign_locations.pop(peer.signature, None)
            if peer.req_task:
                peer.req_task.terminate()
            if _Peer.status_task:
                _Peer.status_task.send(PeerStatus(peer.location, peer.name, PeerStatus.Offline)) 
Example 46
Project: pycos   Author: pgiri   File: dispycos_client4.py    (license)
def rtask_proc(task=None):
    import os
    # receive object from client_proc task
    cobj = yield task.receive()
    if not cobj:
        raise StopIteration
    # Input file is already copied at where this rtask is running (by client).
    # For given input file, create an output file with each line in the output
    # file computed as length of corresponding line in input file
    cobj.result_file = 'result-%s' % cobj.data_file
    with open(cobj.data_file, 'r') as data_fd:
        with open(cobj.result_file, 'w') as result_fd:
            for lineno, line in enumerate(data_fd, start=1):
                result_fd.write('%d: %d\n' % (lineno, len(line)-1))
    # 'sleep' to simulate computing
    yield task.sleep(cobj.n)
    # transfer the result file to client
    status = yield pycos.Pycos().send_file(cobj.client.location, cobj.result_file,
                                                 overwrite=True, timeout=30)
    if status:
        print('Could not send %s to %s' % (cobj.result_file, cobj.client.location))
        cobj.result_file = None
    cobj.client.send(cobj)
    os.remove(cobj.data_file)
    os.remove(cobj.result_file)


# this generator function is used to create local task (at the client) to
# communicate with a remote task 
Example 47
Project: pygrunt   Author: elementbound   File: project.py    (GNU General Public License v3.0)
def unhook_stage(self, stage, hook):
        self.stage_hooks[self._hook_key(stage)].remove(hook)

    # Run project 
Example 48
Project: charm-plumgrid-gateway   Author: openstack   File: hookenv.py    (Apache License 2.0)
def relation_set(relation_id=None, relation_settings=None, **kwargs):
    """Set relation information for the current unit"""
    relation_settings = relation_settings if relation_settings else {}
    relation_cmd_line = ['relation-set']
    accepts_file = "--file" in subprocess.check_output(
        relation_cmd_line + ["--help"], universal_newlines=True)
    if relation_id is not None:
        relation_cmd_line.extend(('-r', relation_id))
    settings = relation_settings.copy()
    settings.update(kwargs)
    for key, value in settings.items():
        # Force value to be a string: it always should, but some call
        # sites pass in things like dicts or numbers.
        if value is not None:
            settings[key] = "{}".format(value)
    if accepts_file:
        # --file was introduced in Juju 1.23.2. Use it by default if
        # available, since otherwise we'll break if the relation data is
        # too big. Ideally we should tell relation-set to read the data from
        # stdin, but that feature is broken in 1.23.2: Bug #1454678.
        with tempfile.NamedTemporaryFile(delete=False) as settings_file:
            settings_file.write(yaml.safe_dump(settings).encode("utf-8"))
        subprocess.check_call(
            relation_cmd_line + ["--file", settings_file.name])
        os.remove(settings_file.name)
    else:
        for key, value in settings.items():
            if value is None:
                relation_cmd_line.append('{}='.format(key))
            else:
                relation_cmd_line.append('{}={}'.format(key, value))
        subprocess.check_call(relation_cmd_line)
    # Flush cache of any relation-gets for local unit
    flush(local_unit()) 
Example 49
Project: charm-plumgrid-gateway   Author: openstack   File: ceph.py    (Apache License 2.0)
def delete_keyring(service):
    """Delete an existing Ceph keyring."""
    keyring = _keyring_path(service)
    if not os.path.exists(keyring):
        log('Keyring does not exist at %s' % keyring, level=WARNING)
        return

    os.remove(keyring)
    log('Deleted ring at %s.' % keyring, level=INFO) 
Example 50
Project: qgis_wp   Author: Zverik   File: mainplugin.py    (GNU General Public License v3.0)
def openOSM(self, filename=None):
        """Converts an OSM file to GeoPackage, loads and styles it."""
        if not filename:
            filename = QFileDialog.getOpenFileName(
                parent=None,
                caption=self.tr(u'Select OpenStreetMap file'),
                filter=self.tr(u'OSM or GeoPackage File') + u' (*.osm *.pbf *.gpkg)')
        if not filename or not os.path.isfile(filename):
            return
        filename = os.path.abspath(filename)
        gpkgFile = os.path.splitext(filename)[0] + '.gpkg'
        if filename.endswith('.gpkg'):
            self.openGeoPackage(filename)
            return

        if os.path.isfile(gpkgFile):
            os.remove(gpkgFile)
        if isWindows():
            cmd = ['cmd.exe', '/C', 'ogr2ogr.exe']
        else:
            cmd = ['ogr2ogr']

        cmd.extend(['--config', 'OSM_USE_CUSTOM_INDEXING', 'NO'])
        iniFile = os.path.join(self.path, 'res', 'osmconf.ini')
        cmd.extend(['--config', 'OSM_CONFIG_FILE', iniFile])
        cmd.extend(['-t_srs', 'EPSG:3857'])
        cmd.extend(['-overwrite'])
        cmd.extend(['-f', 'GPKG', gpkgFile, filename])
        try:
            GdalUtils.runGdal(cmd, ProgressMock())
        except IOError as e:
            self.iface.messageBar().pushCritical(
                self.tr(u'Open OSM Data'), self.tr(u'Error running ogr2ogr: {}').format(e))
            return
        if 'FAILURE' in GdalUtils.consoleOutput:
            self.iface.messageBar().pushCritical(
                self.tr(u'Open OSM Data'), self.tr(u'Error converting OSM to GeoPackage.'))
            return
        self.openGeoPackage(gpkgFile)