Python os.listdir() Examples

The following code examples show how to use os.listdir(). They are taken from open-source Python projects. You can vote up the examples you like or vote down the ones you don't.

Example 1
Project: unicorn-hat-hd   Author: pimoroni   File: weather-icons.py    MIT License 6 votes vote down vote up
def help():
    """Print usage instructions, listing the icon files available in folder_path."""
    # Build the comma-separated listing of candidate icon files first, then
    # substitute it into the usage text and print the whole thing at once.
    available = ', '.join(os.listdir(folder_path))
    usage = """
    Usage:
    python weather-icons.py options

    options:
    loop
    image-file.png

    example:
    weather-icons.py loop
    weather-icons.py clear-day.png

    try one of the files from this list:
    {}
    """.format(available)
    print(usage)
Example 2
Project: unicorn-hat-hd   Author: pimoroni   File: weather-icons.py    MIT License 6 votes vote down vote up
def loop():
    """Draw every icon image in folder_path in sequence.

    Files not ending in icon_extension are skipped with a notice.  A
    KeyboardInterrupt stops the loop; the display is switched off on exit
    either way.
    """
    print('Looping through all images in folder {}\n'
          'CTRL+C to skip image'.format(folder_path))  # fixed "CRL+C" typo

    try:
        for img_file in os.listdir(folder_path):
            if img_file.endswith(icon_extension):
                # os.path.join is safer than raw string concatenation in
                # case folder_path lacks a trailing separator.
                img_path = os.path.join(folder_path, img_file)
                print('Drawing image: {}'.format(img_path))
                img = Image.open(img_path)
                draw_animation(img)
            else:
                print('Not using this file, might be not an image: {}'.format(img_file))
    except KeyboardInterrupt:
        unicorn.off()

    unicorn.off()
Example 3
Project: unicorn-hat-hd   Author: pimoroni   File: weather-icons.py    MIT License 6 votes vote down vote up
def weather_icons():
    """Dispatch on argv[1]: 'loop' animates every icon, a known filename is
    drawn once, and anything else (or no argument) prints the usage help."""
    try:
        selection = argv[1]
        if selection == 'loop':
            loop()
        elif selection in os.listdir(folder_path):
            print('Drawing Image: {}'.format(selection))
            img = Image.open(folder_path + selection)
            draw_animation(img)
            unicorn.off()
        else:
            help()
    except IndexError:
        # No command-line argument supplied at all.
        help()
Example 4
Project: Gurux.DLMS.Python   Author: Gurux   File: GXManufacturerCollection.py    GNU General Public License v2.0 6 votes vote down vote up
def readManufacturerSettings(cls, manufacturers, path):
        """Populate `manufacturers` in place with settings parsed from the
        .obx files found directly under `path`.

        The passed-in list is cleared and refilled so the caller's reference
        observes the result.  (The previous `manufacturers = []` rebound the
        local name only, leaving the caller's list untouched -- the parsed
        results were silently discarded.)  Files that fail to parse are
        reported and skipped.
        """
        # pylint: disable=broad-except
        del manufacturers[:]
        for it in listdir(path):
            # Only regular files that carry the .obx extension are parsed.
            if not isfile(join(path, it)):
                continue
            if it.endswith(".obx"):
                try:
                    manufacturers.append(cls.__parse(join(path, it)))
                except Exception as e:
                    # Best-effort: report the bad file and keep going.
                    print(e)
                    continue

    #
    # Serialize manufacturer from the xml.
    #
    # @param in
    #            Input stream.
    # Serialized manufacturer.
    # 
Example 5
Project: factotum   Author: Denubis   File: update.py    GNU General Public License v3.0 6 votes vote down vote up
def copytree(src, dst, symlinks=False, ignore=None):
    """Recursively copy the tree at `src` to `dst`.

    Unlike shutil.copytree, an already-existing destination directory is
    tolerated (its stats are only copied when it is newly created).

    :param src: source directory
    :param dst: destination directory, created if missing
    :param symlinks: if true, recreate symlinks instead of following them
    :param ignore: optional callable ``(src, names) -> names_to_skip``,
                   compatible with shutil.ignore_patterns
    """
    if not os.path.exists(dst):
        os.makedirs(dst)
        shutil.copystat(src, dst)
    names = os.listdir(src)
    if ignore:
        excluded = ignore(src, names)
        names = [name for name in names if name not in excluded]
    for name in names:
        src_path = os.path.join(src, name)
        dst_path = os.path.join(dst, name)
        if symlinks and os.path.islink(src_path):
            if os.path.lexists(dst_path):
                os.remove(dst_path)
            os.symlink(os.readlink(src_path), dst_path)
            try:
                mode = stat.S_IMODE(os.lstat(src_path).st_mode)
                os.lchmod(dst_path, mode)
            except (AttributeError, NotImplementedError, OSError):
                # os.lchmod is unavailable on most platforms (e.g. Linux);
                # the link itself was created, so ignore the failure.
                # (Was a bare `except:`, which also swallowed SystemExit
                # and KeyboardInterrupt.)
                pass
        elif os.path.isdir(src_path):
            copytree(src_path, dst_path, symlinks, ignore)
        else:
            shutil.copy2(src_path, dst_path)
Example 6
Project: fs_image   Author: facebookincubator   File: yum_dnf_from_snapshot.py    MIT License 6 votes vote down vote up
def _dummies_for_protected_paths(protected_paths) -> Mapping[str, str]:
    '''
    Some locations (e.g. /meta/ and mountpoints) should be off-limits to
    writes by RPMs.  We enforce that by bind-mounting an empty file or
    directory on top of each one of them.
    '''
    # NOTE(review): despite the `Mapping` return annotation, this is a
    # generator (it yields once) -- presumably consumed via a
    # contextmanager-style wrapper at the use site; confirm before relying
    # on the annotation.
    with tempfile.TemporaryDirectory() as td_name, \
            tempfile.NamedTemporaryFile() as tf:
        # NB: There may be duplicates in protected_paths, so we normalize.
        # If the duplicates include both a file and a directory, this picks
        # one arbitrarily, and if the type on disk is different, we will
        # fail at mount time.  This doesn't seem worth an explicit check.
        yield {
            # A trailing '/' marks a directory: serve the empty temp dir;
            # anything else gets the empty temp file.
            os.path.normpath(p): (td_name if p.endswith('/') else tf.name)
                for p in protected_paths
        }
        # NB: The bind mount is read-only, so this is just paranoia.  If it
        # were left RW, we'd need to check its owner / permissions too.
        for expected, actual in (
            ([], os.listdir(td_name)),
            (b'', tf.read()),
        ):
            assert expected == actual, \
                f'Some RPM wrote {actual} to {protected_paths}'
Example 7
Project: fs_image   Author: facebookincubator   File: temp_snapshot.py    MIT License 6 votes vote down vote up
def _make_test_yum_dnf_conf(
    yum_dnf: str, repos_path: Path, gpg_key_path: Path,
) -> str:
    """Render a test yum/dnf configuration: one [main] section followed by a
    section per repo directory found under `repos_path` (the stray
    'dnf.conf' / 'yum.conf' entries are not repos and are skipped)."""
    main_section = textwrap.dedent(f'''\
        [main]
        cachedir=/var/cache/{yum_dnf}
        debuglevel=2
        keepcache=1
        logfile=/var/log/{yum_dnf}.log
        pkgpolicy=newest
        showdupesfromrepos=1
    ''')
    repo_names = [
        r for r in os.listdir(repos_path.decode())
            if r not in ('dnf.conf', 'yum.conf')
    ]
    repo_sections = [
        textwrap.dedent(f'''\
            [{repo}]
            baseurl={(repos_path / repo).file_url()}
            enabled=1
            name={repo}
            gpgkey={gpg_key_path.file_url()}
        ''') for repo in repo_names
    ]
    return main_section + '\n\n'.join(repo_sections)
Example 8
Project: fs_image   Author: facebookincubator   File: repo_server.py    MIT License 6 votes vote down vote up
def read_snapshot_dir(path: Path):
    """Build a location -> object map for serving a repo snapshot.

    Loads the snapshot's SQLite DB objects, then adds an in-memory entry for
    every GPG key of every repo under ``path / 'repos'``.
    """
    db_path = path / 'snapshot.sql3'
    assert os.path.exists(db_path), f'no {db_path}, use rpm_repo_snapshot()'
    location_to_obj = add_snapshot_db_objs(sqlite3.connect(db_path))
    for repo in os.listdir(path / 'repos'):
        # Make JSON metadata for the repo's GPG keys.
        # NOTE(review): the `.decode()` calls below imply `repo` and
        # `key_dir` are bytes (a bytes-based Path type) -- confirm.
        key_dir = path / 'repos' / repo / 'gpg_keys'
        for key_filename in os.listdir(key_dir.decode()):
            with open(key_dir / key_filename, 'rb') as infile:
                key_content = infile.read()
            location_to_obj[os.path.join(repo.decode(), key_filename)] = {
                'size': len(key_content),
                # We don't have a good timestamp for these, so set it to
                # "now".  Caching efficiency losses should be negligible :)
                'build_timestamp': int(time.time()),
                'content_bytes': key_content,  # Instead of `storage_id`
            }
    return location_to_obj
Example 9
Project: fs_image   Author: facebookincubator   File: test_subvolume_garbage_collector.py    MIT License 6 votes vote down vote up
def test_garbage_collect_and_make_new_subvolume(self):
        """After GC plus a request for a new subvolume, only the kept
        refcounts/subvolumes plus the new 'new:subvol' entries must remain.
        """
        with self._gc_test_case() as n, \
             tempfile.TemporaryDirectory() as json_dir:
            sgc.subvolume_garbage_collector([
                '--refcounts-dir', n.refs_dir,
                '--subvolumes-dir', n.subs_dir,
                '--new-subvolume-wrapper-dir', 'new:subvol',
                '--new-subvolume-json', os.path.join(json_dir, 'OUT'),
            ])
            # The JSON output lands exactly where requested...
            self.assertEqual(['OUT'], os.listdir(json_dir))
            # ...and GC kept the live entries while adding the new ones.
            self.assertEqual(
                n.kept_refs | {'new:subvol.json'}, set(os.listdir(n.refs_dir)),
            )
            self.assertEqual(
                n.kept_subs | {'new:subvol'}, set(os.listdir(n.subs_dir)),
            )
Example 10
Project: fs_image   Author: facebookincubator   File: test_fs_utils.py    MIT License 6 votes vote down vote up
def test_path_decode(self):
        """Path must decode invalid UTF-8 the same way Python itself does
        (surrogateescape) when the bytes come from the filesystem.
        """
        with tempfile.TemporaryDirectory() as td:
            bad_utf_path = Path(td) / _BAD_UTF
            self.assertTrue(bad_utf_path.endswith(b'/' + _BAD_UTF))
            # Create the weirdly named file on disk.
            with open(bad_utf_path, 'w'):
                pass
            # Ask a fresh interpreter to list the directory so its repr of
            # the filename reflects Python's default filesystem decoding.
            res = subprocess.run([
                sys.executable, '-c', f'import os;print(os.listdir({repr(td)}))'
            ], stdout=subprocess.PIPE)
            # Path's handling of invalid UTF-8 matches the default for
            # Python3 when it gets such data from the filesystem.
            self.assertEqual(
                # Both evaluate to surrogate-escaped ['\udcc3('] plus a newline.
                repr([bad_utf_path.basename().decode()]) + '\n',
                res.stdout.decode(),
            )
Example 11
Project: leapp-repository   Author: oamg   File: test_lib_backup.py    Apache License 2.0 6 votes vote down vote up
def test_backup_file():
    """backup_file must create 'foo-bar.leapp-backup' next to the original,
    owner-read/write only, with the original left intact."""
    workdir = tempfile.mkdtemp()
    try:
        original = os.path.join(workdir, 'foo-bar')
        payload = 'test content\n'
        with open(original, 'w') as fobj:
            fobj.write(payload)

        backup = backup_file(original)

        # Backup lands beside the original, under the expected name.
        assert os.path.basename(backup) == 'foo-bar.leapp-backup'
        assert os.path.dirname(backup) == workdir
        assert len(os.listdir(workdir)) == 2
        # Backup is private to the owner.
        mode = stat.S_IMODE(os.stat(backup).st_mode)
        assert mode == (stat.S_IRUSR | stat.S_IWUSR)
        # Both files carry the original content.
        with open(backup, 'r') as fobj:
            assert fobj.read() == payload
        with open(original, 'r') as fobj:
            assert fobj.read() == payload
    finally:
        shutil.rmtree(workdir)
Example 12
Project: leapp-repository   Author: oamg   File: test_lib_backup.py    Apache License 2.0 6 votes vote down vote up
def test_backup_file_target_exists():
    """When foo-bar.leapp-backup already exists, backup_file must fall back
    to a suffixed name and leave the pre-existing file untouched."""
    workdir = tempfile.mkdtemp()
    try:
        original = os.path.join(workdir, 'foo-bar')
        occupied = '%s.leapp-backup' % original
        occupied_payload = 'do not overwrite me'
        payload = 'test_content\n'
        with open(original, 'w') as fobj:
            fobj.write(payload)
        with open(occupied, 'w') as fobj:
            fobj.write(occupied_payload)

        backup = backup_file(original)

        # A unique suffixed name was chosen, in the same directory.
        assert os.path.basename(backup).startswith('foo-bar.leapp-backup.')
        assert os.path.dirname(backup) == workdir
        assert len(os.listdir(workdir)) == 3
        # Backup is private to the owner.
        assert stat.S_IMODE(os.stat(backup).st_mode) == (stat.S_IRUSR | stat.S_IWUSR)
        # The backup holds the original content; the occupied target is intact.
        with open(backup, 'r') as fobj:
            assert fobj.read() == payload
        with open(occupied, 'r') as fobj:
            assert fobj.read() == occupied_payload
    finally:
        shutil.rmtree(workdir)
Example 13
Project: leapp-repository   Author: oamg   File: test_converter.py    Apache License 2.0 6 votes vote down vote up
def test_match(self):
        """For every recorded ntp config fixture, conversion must reproduce
        the recorded chrony.conf (and chrony.keys when either side has keys).
        """
        for f in [fe for fe in os.listdir(NTP_MATCH_DIR) if fe.endswith('conf')]:
            # get recorded actual result
            # The numeric filename prefix ties each ntp conf to its fixtures.
            num = f.split('.')[0].split('_')[0]
            ntp_conf = os.path.join(NTP_MATCH_DIR, f)
            # Optional per-case step-tickers file ('' when absent).
            step_tickers = self._check_existance(
                    os.path.join(NTP_MATCH_DIR, '%s_step_tickers' % num))
            config = ntp2chrony.NtpConfiguration('',
                                                 ntp_conf,
                                                 step_tickers=step_tickers)
            potential_chrony_keys = os.path.join(CHRONY_MATCH_DIR, "%s_chrony.keys" % num)
            actual_data = config.get_chrony_conf(chrony_keys_path=potential_chrony_keys)
            expected_fname = os.path.join(CHRONY_MATCH_DIR, "%s_chrony.conf" % num)
            # make sure recorded and generated configs match
            self._do_match(expected_fname, actual_data)
            actual_keys = config.get_chrony_keys()
            expected_keys_file = self._check_existance(potential_chrony_keys)
            # if keys are recorded or generated make sure they match
            if actual_keys and expected_keys_file != '':
                self._do_match(expected_keys_file, actual_keys)
Example 14
Project: leapp-repository   Author: oamg   File: library.py    Apache License 2.0 6 votes vote down vote up
def _get_parsed_configs(read_func=utils.read_file, listdir=os.listdir):
    """Collect (path, parsed_config) pairs for every readable, parseable
    .conf file in the vsftpd config directory.

    `read_func` and `listdir` are injectable for testing.  A missing config
    directory is silently treated as empty; other OS errors are logged.
    """
    results = []
    try:
        for entry in listdir(utils.VSFTPD_CONFIG_DIR):
            full_path = os.path.join(utils.VSFTPD_CONFIG_DIR, entry)
            if not full_path.endswith('.conf'):
                continue
            contents = utils.get_config_contents(full_path, read_func=read_func)
            if contents is None:
                continue
            parsed = _parse_config(full_path, contents)
            if parsed is not None:
                results.append((full_path, parsed))
    except OSError as err:
        if err.errno != errno.ENOENT:
            api.current_logger().warning('Failed to read vsftpd configuration directory: %s'
                                         % err)
    return results
Example 15
Project: leapp-repository   Author: oamg   File: rhsm.py    Apache License 2.0 6 votes vote down vote up
def get_existing_product_certificates(context, rhsm_info):
    """
    Retrieves information about existing product certificates on the system.

    :param context: An instance of a mounting.IsolatedActions class
    :type context: mounting.IsolatedActions class
    :param rhsm_info: An instance of a RHSMInfo derived model.
    :type rhsm_info: RHSMInfo derived model
    """
    # Only populate the model once.
    if rhsm_info.existing_product_certificates:
        return
    for path in ('/etc/pki/product', '/etc/pki/product-default'):
        if not os.path.isdir(context.full_path(path)):
            continue
        certs = []
        for entry in os.listdir(context.full_path(path)):
            # Record the in-system path, but existence-check the full
            # (mounted) path.
            if os.path.isfile(os.path.join(context.full_path(path), entry)):
                certs.append(os.path.join(path, entry))
        if certs:
            rhsm_info.existing_product_certificates.extend(certs)
Example 16
Project: godot-mono-builds   Author: godotengine   File: android.py    MIT License 6 votes vote down vote up
def android_autodetect_cmake(opts: AndroidOpts) -> str:
    """Auto-detect the newest CMake version installed under the Android SDK.

    Scans ``<android_sdk_root>/cmake`` for version-named directories and
    returns the highest version as a string.

    :raises BuildError: if no version-named directory is found.
    """
    from distutils.version import LooseVersion
    from os import listdir

    sdk_cmake_basedir = path_join(opts.android_sdk_root, 'cmake')
    versions = []

    for entry in listdir(sdk_cmake_basedir):
        if not os.path.isdir(path_join(sdk_cmake_basedir, entry)):
            continue
        try:
            versions.append(LooseVersion(entry))
        except ValueError:
            continue  # Not a version folder

    if not versions:
        raise BuildError('Cannot auto-detect Android CMake version')

    # max() avoids sorting the whole list just to take the last element;
    # also fixed the 'lattest' typo in the local name.
    latest_version = str(max(versions))
    print('Auto-detected Android CMake version: ' + latest_version)

    return latest_version
Example 17
Project: alfred-yubikey-otp   Author: robertoriv   File: workflow.py    MIT License 6 votes vote down vote up
def _delete_directory_contents(self, dirpath, filter_func):
        """Delete all files in a directory.

        :param dirpath: path to directory to clear
        :type dirpath: ``unicode`` or ``str``
        :param filter_func function to determine whether a file shall be
            deleted or not.
        :type filter_func ``callable``

        """
        if not os.path.exists(dirpath):
            return
        for entry in os.listdir(dirpath):
            if not filter_func(entry):
                continue
            target = os.path.join(dirpath, entry)
            # Directories need rmtree; plain files (and symlinks) use unlink.
            if os.path.isdir(target):
                shutil.rmtree(target)
            else:
                os.unlink(target)
            self.logger.debug('deleted : %r', target)
Example 18
Project: pyblish-win   Author: pyblish   File: webchecker.py    GNU Lesser General Public License v3.0 6 votes vote down vote up
def open_file(self, url):
        """Open a local file URL for the link checker (Python 2 code).

        Directories are served as HTML: redirect to index.html when present,
        otherwise synthesize a listing with one <A> link per entry.
        """
        path = urllib.url2pathname(urllib.unquote(url))
        if os.path.isdir(path):
            # Normalize directory URLs to end with '/'.
            if path[-1] != os.sep:
                url = url + '/'
            indexpath = os.path.join(path, "index.html")
            if os.path.exists(indexpath):
                return self.open_file(url + "index.html")
            try:
                names = os.listdir(path)
            except os.error, msg:
                # Re-raise the listing failure as IOError, preserving the
                # original traceback (Python 2 three-argument raise).
                exc_type, exc_value, exc_tb = sys.exc_info()
                raise IOError, msg, exc_tb
            names.sort()
            # Synthesize an in-memory HTML directory listing.
            s = MyStringIO("file:"+url, {'content-type': 'text/html'})
            s.write('<BASE HREF="file:%s">\n' %
                    urllib.quote(os.path.join(path, "")))
            for name in names:
                q = urllib.quote(name)
                s.write('<A HREF="%s">%s</A>\n' % (q, q))
            s.seek(0)
            return s
Example 19
Project: pyblish-win   Author: pyblish   File: PyncheWidget.py    GNU Lesser General Public License v3.0 6 votes vote down vote up
def make_view_popups(switchboard, root, extrapath):
    """Create a PopupViewer for every *Viewer.py module found next to this
    file or in the extra search directories."""
    viewers = []
    # where we are in the file system
    search_dirs = [os.path.dirname(__file__)] + extrapath
    for directory in search_dirs:
        if directory == '':
            directory = '.'
        for filename in os.listdir(directory):
            if filename[-9:] != 'Viewer.py':
                continue
            name = filename[:-3]
            try:
                module = __import__(name)
            except ImportError:
                # Pynche is running from inside a package, so get the
                # module using the explicit path.
                pkg = __import__('pynche.' + name)
                module = getattr(pkg, name)
            if hasattr(module, 'ADDTOVIEW') and module.ADDTOVIEW:
                # this is an external viewer
                viewers.append(PopupViewer(module, name, switchboard, root))
    # sort alphabetically
    viewers.sort()
    return viewers
Example 20
Project: pyblish-win   Author: pyblish   File: reindent.py    GNU Lesser General Public License v3.0 6 votes vote down vote up
def check(file):
    """Reindent-check `file`, recursing into directories (Python 2 code).

    Directory recursion honors the global `recurse` flag and skips dotfiles;
    .py files are opened for checking (the rest of the function lies beyond
    this excerpt).
    """
    if os.path.isdir(file) and not os.path.islink(file):
        if verbose:
            print "listing directory", file
        names = os.listdir(file)
        for name in names:
            fullname = os.path.join(file, name)
            # Recurse into non-link, non-hidden directories, and always
            # check .py files.
            if ((recurse and os.path.isdir(fullname) and
                 not os.path.islink(fullname) and
                 not os.path.split(fullname)[1].startswith("."))
                or name.lower().endswith(".py")):
                check(fullname)
        return

    if verbose:
        print "checking", file, "...",
    try:
        f = io.open(file)
    except IOError, msg:
        errprint("%s: I/O Error: %s" % (file, str(msg)))
        return
Example 21
Project: pyblish-win   Author: pyblish   File: ftpmirror.py    GNU Lesser General Public License v3.0 6 votes vote down vote up
def remove(fullname):
    """Recursively delete the local path `fullname` (Python 2 ftpmirror.py).

    Returns 0 when anything could not be removed; the non-directory branch
    appears truncated in this excerpt.
    """
    if os.path.isdir(fullname) and not os.path.islink(fullname):
        # Best-effort listing: an unreadable directory is treated as empty.
        try:
            names = os.listdir(fullname)
        except os.error:
            names = []
        ok = 1
        for name in names:
            if not remove(os.path.join(fullname, name)):
                ok = 0
        if not ok:
            return 0
        try:
            os.rmdir(fullname)
        except os.error, msg:
            print "Can't remove local directory %r: %s" % (fullname, msg)
            return 0
Example 22
Project: pyblish-win   Author: pyblish   File: byext.py    GNU Lesser General Public License v3.0 6 votes vote down vote up
def statdir(self, dir):
        """Accumulate statistics for directory `dir`, recursing into
        subdirectories and delegating regular files to statfile."""
        self.addstats("<dir>", "dirs", 1)
        try:
            entries = sorted(os.listdir(dir))
        except os.error as err:
            sys.stderr.write("Can't list %s: %s\n" % (dir, err))
            self.addstats("<dir>", "unlistable", 1)
            return
        for entry in entries:
            # Skip CVS temp files (".#...") and Emacs backups ("...~").
            if entry.startswith(".#") or entry.endswith("~"):
                continue
            full = os.path.join(dir, entry)
            if os.path.islink(full):
                self.addstats("<lnk>", "links", 1)
            elif os.path.isdir(full):
                self.statdir(full)
            else:
                self.statfile(full)
Example 23
Project: pyblish-win   Author: pyblish   File: cleanfuture.py    GNU Lesser General Public License v3.0 6 votes vote down vote up
def check(file):
    """Check `file` for stale __future__ statements, recursing into
    directories (Python 2 code from cleanfuture.py).

    Directory recursion honors the global `recurse` flag; .py files are
    opened for the actual check (continued beyond this excerpt).
    """
    if os.path.isdir(file) and not os.path.islink(file):
        if verbose:
            print "listing directory", file
        names = os.listdir(file)
        for name in names:
            fullname = os.path.join(file, name)
            # Recurse into non-link directories; always check .py files.
            if ((recurse and os.path.isdir(fullname) and
                 not os.path.islink(fullname))
                or name.lower().endswith(".py")):
                check(fullname)
        return

    if verbose:
        print "checking", file, "...",
    try:
        f = open(file)
    except IOError, msg:
        errprint("%r: I/O Error: %s" % (file, str(msg)))
        return
Example 24
Project: pyblish-win   Author: pyblish   File: checkappend.py    GNU Lesser General Public License v3.0 6 votes vote down vote up
def check(file):
    """Check `file` for suspicious .append usage, recursing into directories
    (Python 2 code from checkappend.py).

    Unlike its reindent/cleanfuture siblings, this version always recurses
    (no `recurse` flag).  The .py check continues beyond this excerpt.
    """
    if os.path.isdir(file) and not os.path.islink(file):
        if verbose:
            print "%r: listing directory" % (file,)
        names = os.listdir(file)
        for name in names:
            fullname = os.path.join(file, name)
            # Recurse into non-link directories; always check .py files.
            if ((os.path.isdir(fullname) and
                 not os.path.islink(fullname))
                or os.path.normcase(name[-3:]) == ".py"):
                check(fullname)
        return

    try:
        f = open(file)
    except IOError, msg:
        errprint("%r: I/O Error: %s" % (file, msg))
        return
Example 25
Project: pyblish-win   Author: pyblish   File: test_discovery.py    GNU Lesser General Public License v3.0 6 votes vote down vote up
def test_discover_with_modules_that_fail_to_import(self):
        """Discovery must still synthesize a (failing) test case for a module
        that cannot be imported.
        """
        loader = unittest.TestLoader()

        # Monkeypatch the real os/os.path so discovery "sees" exactly one
        # phantom module; addCleanup restores them and sys.path afterwards.
        listdir = os.listdir
        os.listdir = lambda _: ['test_this_does_not_exist.py']
        isfile = os.path.isfile
        os.path.isfile = lambda _: True
        orig_sys_path = sys.path[:]
        def restore():
            os.path.isfile = isfile
            os.listdir = listdir
            sys.path[:] = orig_sys_path
        self.addCleanup(restore)

        suite = loader.discover('.')
        self.assertIn(os.getcwd(), sys.path)
        self.assertEqual(suite.countTestCases(), 1)
        test = list(list(suite)[0])[0] # extract test from suite

        # The synthesized test re-raises the import failure when invoked.
        with self.assertRaises(ImportError):
            test.test_this_does_not_exist()
Example 26
Project: pyblish-win   Author: pyblish   File: test_main.py    GNU Lesser General Public License v3.0 6 votes vote down vote up
def test_filename_changing_on_output_two_files(self):
        """2to3 two files in one directory with a new output dir."""
        self.setup_test_source_trees()
        err = StringIO.StringIO()
        py2_files = [self.trivial_py2_file, self.init_py2_file]
        expected_files = set(os.path.basename(name) for name in py2_files)
        # --write-unchanged-files makes both files appear in the output even
        # if 2to3 changes nothing; --output-dir redirects the writes.
        ret = self.run_2to3_capture(
                ["-n", "-w", "--write-unchanged-files",
                 "--no-diffs", "--output-dir", self.py3_dest_dir] + py2_files,
                StringIO.StringIO(""), StringIO.StringIO(), err)
        self.assertEqual(ret, 0)
        stderr = err.getvalue()
        # The tool must announce the input -> output mirroring on stderr...
        self.assertIn(
                "Output in %r will mirror the input directory %r layout" % (
                        self.py3_dest_dir, self.py2_src_dir), stderr)
        # ...and the destination must contain exactly the converted files.
        self.assertEqual(expected_files, set(os.listdir(self.py3_dest_dir)))
Example 27
Project: fs_image   Author: facebookincubator   File: update_package_db.py    MIT License 5 votes vote down vote up
def _read_json_dir_db(path: Path) -> PackageTagDb:
    """Load a package -> tag -> info mapping from a directory tree laid out
    as ``<path>/<package>/<tag>.json`` (each file carries a generated
    header, which is consumed before the JSON payload)."""
    db = {}
    for package in os.listdir(path):
        tag_to_info = db.setdefault(package.decode(), {})
        package_dir = path / package
        for tag_json in os.listdir(package_dir):
            tag_json = tag_json.decode()
            assert tag_json.endswith(_JSON), (path, package, tag_json)
            with open(package_dir / tag_json) as infile:
                _read_generated_header(infile)
                tag_to_info[tag_json[:-len(_JSON)]] = json.load(infile)
    return db
Example 28
Project: fs_image   Author: facebookincubator   File: temp_snapshot.py    MIT License 5 votes vote down vote up
def make_temp_snapshot(
    repos, out_dir, gpg_key_path, gpg_key_whitelist_dir,
) -> Path:
    'Generates temporary RPM repo snapshots for tests to use as inputs.'
    # NOTE(review): the `gpg_key_whitelist_dir` parameter is unused below --
    # `snapshot_repos` gets the module-level `no_gpg_keys_yet` instead.
    # Confirm whether that is intentional.
    snapshot_dir = out_dir / 'temp_snapshot_dir'
    os.mkdir(snapshot_dir)

    with temp_repos_steps(repo_change_steps=[repos]) as repos_root:
        snapshot_repos(
            dest=snapshot_dir,
            # Snapshot the 0th step only, since only that is defined
            yum_conf_content=_make_test_yum_dnf_conf(
                'yum', repos_root / '0', gpg_key_path,
            ),
            dnf_conf_content=_make_test_yum_dnf_conf(
                'dnf', repos_root / '0', gpg_key_path,
            ),
            repo_db_ctx=RepoDBContext(
                DBConnectionContext.make(
                    kind='sqlite', db_path=(out_dir / 'db.sqlite3').decode(),
                ),
                SQLDialect.SQLITE3,
            ),
            storage=Storage.make(
                key='test',
                kind='filesystem',
                base_dir=(out_dir / 'storage').decode(),
            ),
            rpm_shard=RpmShard(shard=0, modulo=1),
            gpg_key_whitelist_dir=no_gpg_keys_yet,
            retries=0,  # Nothing here should require retries, it's a bug.
        )

    # Merge the repo snapshot with the storage & RPM DB -- this makes our
    # test snapshot build target look very much like prod snapshots.
    for f in os.listdir(snapshot_dir):
        assert not os.path.exists(out_dir / f), f'Must not overwrite {f}'
        os.rename(snapshot_dir / f, out_dir / f)
    os.rmdir(snapshot_dir)
Example 29
Project: fs_image   Author: facebookincubator   File: test_parse_repodata.py    MIT License 5 votes vote down vote up
def _listdir(path: Path) -> Set[Path]:
    """Return the entries of `path` as a set of full paths (`path / entry`)."""
    entries = os.listdir(path)
    return set(path / entry for entry in entries)
Example 30
Project: fs_image   Author: facebookincubator   File: test_gpg_keys.py    MIT License 5 votes vote down vote up
def test_snapshot_gpg_keys(self):
        """snapshot_gpg_keys must reject keys missing from (or differing
        from) the whitelist, and snapshot them once correctly whitelisted.
        """
        with temp_dir() as td:
            hello_path = td / 'hello'
            with open(hello_path, 'w') as out_f:
                out_f.write('world')

            whitelist_dir = td / 'whitelist'
            os.mkdir(whitelist_dir)

            def try_snapshot(snapshot_dir):
                snapshot_gpg_keys(
                    key_urls=[hello_path.file_url()],
                    whitelist_dir=whitelist_dir,
                    snapshot_dir=snapshot_dir,
                )

            # The snapshot won't work until the key is correctly whitelisted.
            with temp_dir() as snap_dir, self.assertRaises(FileNotFoundError):
                try_snapshot(snap_dir)
            # A whitelist entry with mismatched content is also rejected.
            with open(whitelist_dir / 'hello', 'w') as out_f:
                out_f.write('wrong contents')
            with temp_dir() as snap_dir, self.assertRaises(AssertionError):
                try_snapshot(snap_dir)
            shutil.copy(hello_path, whitelist_dir)

            # With the correct whitelist copy in place, the key snapshots.
            with temp_dir() as snapshot_dir:
                try_snapshot(snapshot_dir)
                self.assertEqual([b'gpg_keys'], os.listdir(snapshot_dir))
                self.assertEqual(
                    [b'hello'], os.listdir(snapshot_dir / 'gpg_keys'),
                )
                with open(snapshot_dir / 'gpg_keys/hello') as in_f:
                    self.assertEqual('world', in_f.read())
Example 31
Project: fs_image   Author: facebookincubator   File: test_filesystem_storage.py    MIT License 5 votes vote down vote up
def test_uncommitted(self):
        """Writes that are never committed (or abort via an exception) must
        leave the storage's base directory empty."""
        with self._temp_storage() as storage:
            def assert_base_dir_empty():
                self.assertEqual([], os.listdir(storage.base_dir))

            # A fresh storage starts out empty.
            assert_base_dir_empty()

            # An uncommitted write leaves nothing behind.
            with storage.writer() as writer:
                writer.write(b'foo')
            assert_base_dir_empty()

            # Neither does a writer aborted by an exception.
            with self.assertRaisesRegex(RuntimeError, '^abracadabra$'):
                with storage.writer() as writer:
                    raise RuntimeError('abracadabra')
            assert_base_dir_empty()
Example 32
Project: fs_image   Author: facebookincubator   File: test_subvolume_garbage_collector.py    MIT License 5 votes vote down vote up
def test_garbage_collect_subvolumes(self):
        """GC must leave exactly the kept refcounts and kept subvolumes,
        whether invoked directly or via the CLI wrapper (_gc_only).
        """
        for fn in [
            lambda n: sgc.garbage_collect_subvolumes(n.refs_dir, n.subs_dir),
            self._gc_only,
        ]:
            with self._gc_test_case() as n:
                fn(n)
                self.assertEqual(n.kept_refs, set(os.listdir(n.refs_dir)))
                self.assertEqual(n.kept_subs, set(os.listdir(n.subs_dir)))
Example 33
Project: fs_image   Author: facebookincubator   File: test_subvolume_garbage_collector.py    MIT License 5 votes vote down vote up
def test_no_gc_due_to_lock(self):
        """While the subvolumes dir is flock'd, GC must delete nothing --
        and creating a subvolume whose refcount already exists must fail.
        """
        with self._gc_test_case() as n:
            # Hold a shared lock on the subvolumes dir to block GC.
            fd = os.open(n.subs_dir, os.O_RDONLY)
            try:
                fcntl.flock(fd, fcntl.LOCK_SH | fcntl.LOCK_NB)
                self._gc_only(n)

                # Sneak in a test that new subvolume creation fails when
                # its refcount already exists.
                with tempfile.TemporaryDirectory() as json_dir, \
                     self.assertRaisesRegex(
                         RuntimeError, 'Refcount already exists:',
                     ):
                    sgc.subvolume_garbage_collector([
                        '--refcounts-dir', n.refs_dir,
                        '--subvolumes-dir', n.subs_dir,
                        # This refcount was created by `_gc_test_case`.
                        '--new-subvolume-wrapper-dir', '3link:1',
                        '--new-subvolume-json', os.path.join(json_dir, 'OUT'),
                    ])

            finally:
                    os.close(fd)

            # Nothing was collected: both the kept and the would-be-GCed
            # entries are still present.
            self.assertEqual(
                n.kept_refs | n.gcd_refs, set(os.listdir(n.refs_dir))
            )
            self.assertEqual(
                n.kept_subs | n.gcd_subs, set(os.listdir(n.subs_dir))
            )
Example 34
Project: fs_image   Author: facebookincubator   File: test_fs_utils.py    MIT License 5 votes vote down vote up
def _check_has_one_file(self, dir_path, filename, contents):
        """Assert that `dir_path` holds exactly one file named `filename`
        (as bytes, since listings of byte paths come back as bytes) whose
        content equals `contents`."""
        entries = os.listdir(dir_path)
        self.assertEqual([filename.encode()], entries)
        with open(dir_path / filename) as in_f:
            data = in_f.read()
        self.assertEqual(contents, data)
Example 35
Project: fs_image   Author: facebookincubator   File: procfs_serde.py    MIT License 5 votes vote down vote up
def deserialize_untyped(subvol, path_with_ext: str) -> Any:
    """Inverse of the procfs-style serializer: read `path_with_ext` inside
    `subvol` and reconstruct a value.

    - Directory: dict mapping entry name -> recursively deserialized value.
    - `.bin` file: raw bytes, unmodified.
    - `.image_path` / `.host_path` file: bytes, trailing newline stripped.
    - Extensionless file: decoded str, trailing newline stripped.

    :raises AssertionError: on a missing trailing newline, an unsupported
        extension, or a path that is neither a file nor a directory.
    """
    # NB: while `isdir` and `isfile` do follow symbolic links, `subvol.path`
    # will prevent the use of symlinks that take us outside the subvol.
    if os.path.isdir(subvol.path(path_with_ext)):
        return {
            k: deserialize_untyped(subvol, os.path.join(path_with_ext, k))
                for k in os.listdir(subvol.path(path_with_ext).decode())
        }
    elif os.path.isfile(subvol.path(path_with_ext)):
        with open(subvol.path(path_with_ext), 'rb') as f:
            s = f.read()

        _, ext = os.path.splitext(path_with_ext)
        if ext == '.bin':
            return s

        # All other extensions had a trailing newline appended.
        if not s.endswith(b'\n'):
            raise AssertionError(
                f'{path_with_ext} must have had a trailing newline, got {s}'
            )
        s = s[:-1]

        if ext in ('.image_path', '.host_path'):
            return s
        elif ext == '':
            return s.decode()
        else:
            # Fixed: the original f-string contained the literal text
            # '(path_with_ext)' instead of interpolating the path.
            raise AssertionError(f'Unsupported extension: {path_with_ext}')
    else:
        raise AssertionError(f'{path_with_ext} is neither a file nor a dir')
Example 36
Project: leapp-repository   Author: oamg   File: actor.py    Apache License 2.0 5 votes vote down vote up
def process(self):
        """Collect bundled *.rpm files and produce an RpmTransactionTasks message."""
        location = self.get_folder_path('bundled-rpms')
        # Resolve each file to its real path: symlinks cannot be resolved
        # properly inside the target userspace since they use the
        # /installroot mount target.
        local_rpms = [
            os.path.realpath(os.path.join(location, name))
            for name in os.listdir(location)
            if name.endswith('.rpm')
        ]
        if local_rpms:
            self.produce(RpmTransactionTasks(local_rpms=local_rpms))
Example 37
Project: leapp-repository   Author: oamg   File: library.py    Apache License 2.0 5 votes vote down vote up
def get_vsftpd_facts(read_func=utils.read_file, listdir=os.listdir):
    """Build a VsftpdFacts message from the default-config hash and all parsed configs.

    `read_func` and `listdir` are injectable for testing.
    """
    config_hash = utils.get_default_config_hash(read_func=read_func)
    parsed_configs = _get_parsed_configs(read_func=read_func, listdir=listdir)
    res_configs = [
        VsftpdConfig(
            path=path,
            strict_ssl_read_eof=config.get(utils.STRICT_SSL_READ_EOF),
            tcp_wrappers=config.get(utils.TCP_WRAPPERS),
        )
        for path, config in parsed_configs
    ]
    return VsftpdFacts(default_config_hash=config_hash, configs=res_configs)
Example 38
Project: leapp-repository   Author: oamg   File: modscan.py    Apache License 2.0 5 votes vote down vote up
def _create_dracut_modules():
    """Yield an UpgradeDracutModule for every module directory bundled with the actor."""
    dracut_base_path = api.get_actor_folder_path('dracut')
    if not dracut_base_path:
        # No bundled dracut folder: yield nothing (same as the empty generator).
        return
    dracut_base_path = os.path.abspath(dracut_base_path)
    for module in os.listdir(dracut_base_path):
        # The module name strips any leading digits from the directory name
        # (e.g. an ordering prefix like `85...` — presumably; verify upstream).
        yield UpgradeDracutModule(
            name=re.sub(r'^\d+', '', module),
            module_path=os.path.join(dracut_base_path, module),
        )
Example 39
Project: leapp-repository   Author: oamg   File: actor.py    Apache License 2.0 5 votes vote down vote up
def process(self):
        """Gather firewalld configuration facts from /etc/firewalld and produce them."""
        facts = FirewalldFacts()

        # Lockdown whitelist: record the firewall-config command, if readable.
        try:
            root = ElementTree.parse(
                '/etc/firewalld/lockdown-whitelist.xml').getroot()
            facts.firewall_config_command = \
                private.getLockdownFirewallConfigCommand(root)
        except IOError:
            pass  # a missing/unreadable file just leaves the fact unset

        # Direct rules: record which ebtables tables are referenced.
        try:
            root = ElementTree.parse('/etc/firewalld/direct.xml').getroot()
            facts.ebtablesTablesInUse = private.getEbtablesTablesInUse(root)
        except IOError:
            pass

        # Ipsets: union the set types used across all *.xml ipset files.
        types_in_use = set()
        directory = '/etc/firewalld/ipsets'
        try:
            for filename in os.listdir(directory):
                if not filename.endswith('.xml'):
                    continue
                try:
                    root = ElementTree.parse(
                        os.path.join(directory, filename)).getroot()
                    types_in_use |= set(private.getIpsetTypesInUse(root))
                except IOError:
                    pass  # skip unreadable ipset files
            facts.ipsetTypesInUse = list(types_in_use)
        except OSError:
            pass  # the ipsets directory may not exist; fact stays unset

        self.produce(facts)
Example 40
Project: leapp-repository   Author: oamg   File: library.py    Apache License 2.0 5 votes vote down vote up
def check_files_for_compressed_ipv6():
    """Return sendmail files that contain compressed IPv6 addresses and need migration."""
    conf_files = get_conf_files()
    # For each *.db file, check the corresponding source file it was built
    # from (the name with the `.db` suffix stripped).
    db_sources = [
        os.path.join(SendmailConfDir, re.sub(r'\.db$', '', name))
        for name in os.listdir(SendmailConfDir)
        if name.endswith('.db')
    ]
    regex = re.compile(r'IPv6:[0-9a-fA-F:]*::')
    migrate_files = []
    for filename in db_sources + conf_files:
        if not os.path.exists(filename):
            continue
        with open(filename) as file_check:
            for line in file_check:
                if regex.search(line) and not check_false_positives(
                        os.path.basename(filename), line):
                    migrate_files.append(filename)
                    break  # one hit is enough; move to the next file
    return migrate_files
Example 41
Project: PEAKachu   Author: tbischler   File: replicons.py    ISC License 5 votes vote down vote up
def _check_annotations(self):
        """Load annotations from every file found in the configured .gff folder."""
        if self._gff_folder is None:
            print("No folder with .gff files specified")
            return
        gff_files = [
            join(self._gff_folder, entry)
            for entry in listdir(self._gff_folder)
            if isfile(join(self._gff_folder, entry))
        ]
        if not gff_files:
            print("No .gff file found in specified folder")
            return
        for gff_file in gff_files:
            self._store_annotations(gff_file)
Example 42
Project: PEAKachu   Author: tbischler   File: consensus_peak.py    ISC License 5 votes vote down vote up
def _store_peaks(self):
        """Read every peak table and register each peak interval per replicon/strand."""
        peak_table_folder = "{}/peak_tables".format(self._project_folder)
        peak_files = [
            join(peak_table_folder, entry)
            for entry in listdir(peak_table_folder)
            if isfile(join(peak_table_folder, entry))
        ]
        for peak_file in peak_files:
            peak_df = pd.read_table(peak_file, sep='\t')
            for peak in peak_df.to_dict("records"):
                strand_dict = self._replicon_peak_dict[peak["replicon"]]
                strand_dict[peak["peak_strand"]].add(
                    (peak["peak_start"], peak["peak_end"]))
Example 43
Project: PEAKachu   Author: tbischler   File: consensus_peak.py    ISC License 5 votes vote down vote up
def _get_peak_coverage(self):
        """Aggregate normalized coverage over consensus peaks.

        Reads every wiggle file in `<project>/normalized_coverage`, accumulates
        per-library/per-strand coverage arrays of length
        `self._consensus_length`, then sums the two strands per library.

        Returns:
            dict mapping library name -> numpy array (strand-combined values).
        """
        norm_coverage_folder = "{}/normalized_coverage".format(
            self._project_folder)
        # Only plain files are considered coverage inputs.
        coverage_files = [join(norm_coverage_folder, f) for f in listdir(
            norm_coverage_folder) if isfile(join(norm_coverage_folder, f))]
        wiggle_parser = WiggleParser()
        # cons_value_dict: lib name -> {strand ('+'/'-') -> coverage array}
        cons_value_dict = defaultdict(dict)
        for coverage_file in coverage_files:
            # One accumulator per file; every track in the file adds into it.
            cons_values = np.zeros(self._consensus_length)
            with open(coverage_file, 'r') as cov_fh:
                for wiggle_entry in wiggle_parser.entries(cov_fh):
                    # Track names look like "<lib>_forward" / "<lib>_reverse";
                    # any non-"forward" suffix is treated as the '-' strand.
                    lib_name_and_strand = wiggle_entry.track_name
                    lib_name = '_'.join(lib_name_and_strand.split('_')[:-1])
                    lib_strand = '+' if lib_name_and_strand.split(
                        '_')[-1] == "forward" else '-'
                    replicon = wiggle_entry.replicon
                    pos_value_pairs = dict(wiggle_entry.pos_value_pairs)
                    self._get_coverage_for_replicon_peaks(
                        replicon, lib_strand, pos_value_pairs, cons_values)
            # NOTE(review): this runs once per file, after the track loop,
            # using `lib_name`/`lib_strand` from the *last* track — it assumes
            # exactly one library+strand per file (and raises NameError if a
            # file has no tracks at all). Confirm that assumption upstream.
            cons_value_dict[lib_name][lib_strand] = cons_values
        # combine strands
        comb_cons_value_dict = {}
        for lib in cons_value_dict:
            comb_cons_value_dict[lib] = np.zeros(self._consensus_length)
            for strand in cons_value_dict[lib]:
                comb_cons_value_dict[lib] += cons_value_dict[lib][strand]
        return comb_cons_value_dict
Example 44
Project: meta-transfer-learning   Author: erfaneshrati   File: miniimagenet.py    MIT License 5 votes vote down vote up
def _read_classes(dir_path):
    """Return an ImageNetClass for each WNID ('n'-prefixed) entry in `dir_path`."""
    wnid_entries = [entry for entry in os.listdir(dir_path)
                    if entry.startswith('n')]
    return [ImageNetClass(os.path.join(dir_path, entry))
            for entry in wnid_entries]

# pylint: disable=R0903 
Example 45
Project: meta-transfer-learning   Author: erfaneshrati   File: miniimagenet.py    MIT License 5 votes vote down vote up
def sample(self, num_images):
        """
        Sample images (as numpy arrays) from the class.

        Returns:
          A sequence of 84x84x3 numpy arrays.
          Each pixel ranges from 0 to 1.
        """
        # Shuffle the .JPEG file names, then load the first `num_images`.
        jpeg_names = [entry for entry in os.listdir(self.dir_path)
                      if entry.endswith('.JPEG')]
        random.shuffle(jpeg_names)
        return [self._read_image(name) for name in jpeg_names[:num_images]]
Example 46
Project: Wide-Residual-Nets-for-SETI   Author: sgrvinod   File: average_scores.py    Apache License 2.0 5 votes vote down vote up
def average_scores(input_folder, output_path):
    """
    Averages scores of several CSV files generated by test.py

    Args:
        input_folder (path): folder with models' scores' CSVs in it.
        output_path (path): path of output CSV file with averaged scores, ready for submission to SETI scoreboards

    Raises:
        AssertionError: if the CSV files' indices do not all match, or the
            averaged result has an unexpected number of rows.
    """
    csv_files = [f for f in os.listdir(input_folder) if f.endswith('.csv')]
    model_scores = []
    index = None
    # Renamed loop variable: `csv` shadowed the stdlib module name.
    for i, csv_file in enumerate(csv_files):
        df = pd.read_csv(os.path.join(input_folder, csv_file), index_col=0, header=None)
        if i == 0:
            index = df.index
        else:
            assert index.equals(df.index), "Indices of one or more files do not match!"
        model_scores.append(df)
    # BUG FIX: converted Python 2 `print` statements to print() calls —
    # they were syntax errors under Python 3.
    print("Read %d files. Averaging..." % len(model_scores))

    concat_scores = pd.concat(model_scores)
    averaged_scores = concat_scores.groupby(level=0).mean()
    assert averaged_scores.shape[0] == len(list(index)), "Something went wrong when concatenating/averaging!"
    # groupby sorts the index, so restore the original file order.
    averaged_scores = averaged_scores.reindex(index)

    averaged_scores.to_csv(output_path, header=False, index=True)
    print("Averaged scores saved to %s" % output_path)
Example 47
Project: gog-galaxy-plugin-downloader   Author: Slashbunny   File: download.py    GNU General Public License v3.0 5 votes vote down vote up
def fix_plugin_directories(dest):
    """
    Loops through all folders in the output directory, reads their manifest
    file, and renames the directory to the standard <platform>_<guid> format

    Args:
        dest: destination directory containing one sub-directory per plugin.
    """
    # Loop through directories in the destination directory
    for existing_dir in os.listdir(dest):
        existing_path = os.path.join(dest, existing_dir)

        # Skip non-directories
        if not os.path.isdir(existing_path):
            continue

        try:
            # FIX: dropped the redundant m.close() — `with` already closes
            # the file; key lookups moved out since KeyError is still caught.
            with open(os.path.join(existing_path, 'manifest.json')) as m:
                data = json.load(m)
            platform = data['platform']
            guid = data['guid']

            expected_dir = platform + '_' + guid
            expected_path = os.path.join(dest, expected_dir)

            if existing_path != expected_path:
                print('NOTICE: Folder should be "{}", but it is named "{}"'
                      .format(expected_dir, existing_dir))

                if os.path.isdir(expected_path):
                    print('NOTICE: Correct pathed plugin already exists,'
                          + ' deleting extra plugin')
                    shutil.rmtree(existing_path)
                else:
                    print('NOTICE: Renaming folder to proper name')
                    shutil.move(existing_path, expected_path)
        except (FileNotFoundError, json.decoder.JSONDecodeError, KeyError):
            print('ERROR: Could not read plugin data from {} folder'
                  .format(existing_path))
Example 48
Project: gog-galaxy-plugin-downloader   Author: Slashbunny   File: download.py    GNU General Public License v3.0 5 votes vote down vote up
def delete_old_plugins(data, dest):
    """
    Deletes versions of plugins that don't match the yaml manifest. In theory
    this should only be older versions, but any version that doesn't match
    the yaml definition will be deleted

    This explicitly does not touch other directories that do not match the
    known plugin names.

    If the version doesn't match the yaml definition, the directory is removed

    Args:
        data: mapping of plugin name -> definition dict (must contain 'guid').
        dest: directory holding one sub-directory per installed plugin.
    """
    # Loop over each plugin definition.
    # FIX: renamed the loop variable — the original rebound `data` inside
    # the loop, shadowing the parameter it was iterating over.
    for name, plugin in data.items():
        expected_plugin_dir = name + '_' + plugin['guid']

        # Loop through directories in the destination directory
        for item in os.listdir(dest):
            full_path = os.path.join(dest, item)

            # Skip non-directories
            if not os.path.isdir(full_path):
                continue

            # Skip the directory that matches the expected <name>_<guid>
            if item == expected_plugin_dir:
                continue

            # If any other directory begins with <plugin_name>_, delete it
            if item.startswith(name + '_'):
                print('Deleting wrong version "{}" from "{}"'
                      .format(item, dest))
                shutil.rmtree(full_path)
Example 49
Project: malcode   Author: moonsea   File: filtersamemd5.py    GNU General Public License v3.0 5 votes vote down vote up
def countFile(dirpath, suffix=''):
    """Count entries in `dirpath` whose last dot-separated component equals `suffix`."""
    return sum(1 for name in os.listdir(dirpath)
               if name.split('.')[-1] == suffix)
Example 50
Project: malcode   Author: moonsea   File: wingenasm.py    GNU General Public License v3.0 5 votes vote down vote up
def countFile(dirpath, suffix=''):
    """Return how many entries in `dirpath` have `suffix` as the text after their last '.'."""
    count = 0
    for entry in os.listdir(dirpath):
        if entry.split('.')[-1] == suffix:
            count += 1
    return count