Python os.listdir() Examples

The following code examples show how to use os.listdir(). They are taken from open-source Python projects. You can vote up the examples you find helpful or vote down those you don't.

Example 1
Project: factotum   Author: Denubis   File: update.py    GNU General Public License v3.0 10 votes vote down vote up
def copytree(src, dst, symlinks = False, ignore = None):
	"""Recursively copy the tree at `src` to `dst`.

	Like `shutil.copytree`, but tolerates a pre-existing destination
	directory (only a freshly created `dst` gets `copystat` applied).

	:param src: source directory path
	:param dst: destination directory path (created if missing)
	:param symlinks: when True, re-create symlinks instead of copying
		their targets
	:param ignore: optional callable `ignore(dir, names) -> excluded`,
		same contract as `shutil.ignore_patterns`
	"""
	if not os.path.exists(dst):
		os.makedirs(dst)
		shutil.copystat(src, dst)
	lst = os.listdir(src)
	if ignore:
		excl = ignore(src, lst)
		lst = [x for x in lst if x not in excl]
	for item in lst:
		s = os.path.join(src, item)
		d = os.path.join(dst, item)
		if symlinks and os.path.islink(s):
			if os.path.lexists(d):
				os.remove(d)
			os.symlink(os.readlink(s), d)
			try:
				st = os.lstat(s)
				mode = stat.S_IMODE(st.st_mode)
				os.lchmod(d, mode)
			# `os.lchmod` is absent on Linux (AttributeError) and may be
			# unsupported by the platform/filesystem (NotImplementedError,
			# OSError); this is best-effort.  Narrowed from a bare
			# `except:` so KeyboardInterrupt/SystemExit and genuine bugs
			# are no longer swallowed.
			except (AttributeError, NotImplementedError, OSError):
				pass # lchmod not available
		elif os.path.isdir(s):
			copytree(s, d, symlinks, ignore)
		else:
			shutil.copy2(s, d)
Example 2
Project: fs_image   Author: facebookincubator   File: yum_dnf_from_snapshot.py    MIT License 7 votes vote down vote up
def _dummies_for_protected_paths(protected_paths) -> Mapping[str, str]:
    '''
    Some locations (e.g. /meta/ and mountpoints) should be off-limits to
    writes by RPMs.  We enforce that by bind-mounting an empty file or
    directory on top of each one of them.
    '''
    with tempfile.TemporaryDirectory() as td_name, \
            tempfile.NamedTemporaryFile() as tf:
        # NB: There may be duplicates in protected_paths, so we normalize.
        # If the duplicates include both a file and a directory, this picks
        # one arbitrarily, and if the type on disk is different, we will
        # fail at mount time.  This doesn't seem worth an explicit check.
        yield {
            os.path.normpath(p): (td_name if p.endswith('/') else tf.name)
                for p in protected_paths
        }
        # NB: The bind mount is read-only, so this is just paranoia.  If it
        # were left RW, we'd need to check its owner / permissions too.
        for expected, actual in (
            ([], os.listdir(td_name)),
            (b'', tf.read()),
        ):
            assert expected == actual, \
                f'Some RPM wrote {actual} to {protected_paths}' 
Example 3
Project: fs_image   Author: facebookincubator   File: repo_server.py    MIT License 7 votes vote down vote up
def read_snapshot_dir(path: Path):
    '''
    Build the in-memory "location -> object" map for serving an RPM repo
    snapshot directory.

    :param path: snapshot directory containing `snapshot.sql3` and a
        `repos/` subdirectory.  NOTE(review): `path` appears to be the
        project's bytes-based `Path` (listdir entries get `.decode()`d
        below) — confirm against `fs_utils.Path`.
    :return: the DB-derived map, augmented with one entry per GPG key
        file found under each repo's `gpg_keys/` directory.
    '''
    db_path = path / 'snapshot.sql3'
    assert os.path.exists(db_path), f'no {db_path}, use rpm_repo_snapshot()'
    location_to_obj = add_snapshot_db_objs(sqlite3.connect(db_path))
    for repo in os.listdir(path / 'repos'):  # bytes entries (bytes `path`)
        # Make JSON metadata for the repo's GPG keys.
        key_dir = path / 'repos' / repo / 'gpg_keys'
        # Listing the decoded (str) path makes the filenames come back as str.
        for key_filename in os.listdir(key_dir.decode()):
            with open(key_dir / key_filename, 'rb') as infile:
                key_content = infile.read()
            location_to_obj[os.path.join(repo.decode(), key_filename)] = {
                'size': len(key_content),
                # We don't have a good timestamp for these, so set it to
                # "now".  Caching efficiency losses should be negligible :)
                'build_timestamp': int(time.time()),
                'content_bytes': key_content,  # Instead of `storage_id`
            }
    return location_to_obj
Example 4
Project: pyblish-win   Author: pyblish   File: reindent.py    GNU Lesser General Public License v3.0 7 votes vote down vote up
def check(file):
    # Recursively reindent-check `file` (Python 2 code).  Directories are
    # walked; subdirectories are descended into only when the module
    # global `recurse` is set and the name is not dot-prefixed, while
    # `.py` files are always checked.  Relies on module globals `verbose`
    # and `recurse`, and on the module-level `errprint` helper.
    if os.path.isdir(file) and not os.path.islink(file):
        if verbose:
            print "listing directory", file
        names = os.listdir(file)
        for name in names:
            fullname = os.path.join(file, name)
            if ((recurse and os.path.isdir(fullname) and
                 not os.path.islink(fullname) and
                 not os.path.split(fullname)[1].startswith("."))
                or name.lower().endswith(".py")):
                check(fullname)
        return

    if verbose:
        print "checking", file, "...",
    try:
        f = io.open(file)
    except IOError, msg:
        # Unreadable files are reported and skipped, not fatal.
        errprint("%s: I/O Error: %s" % (file, str(msg)))
        return
Example 5
Project: unicorn-hat-hd   Author: pimoroni   File: weather-icons.py    MIT License 6 votes vote down vote up
def help():
    # Print CLI usage, listing every file available in `folder_path`.
    available = ', '.join(os.listdir(folder_path))
    usage = """
    Usage:
    python weather-icons.py options

    options:
    loop
    image-file.png

    example:
    weather-icons.py loop
    weather-icons.py clear-day.png

    try one of the files from this list:
    {}
    """.format(available)
    print(usage)
Example 6
Project: unicorn-hat-hd   Author: pimoroni   File: weather-icons.py    MIT License 6 votes vote down vote up
def loop():
    # Draw every icon image found in `folder_path`; Ctrl+C interrupts the
    # cycle, and the Unicorn HAT is blanked either way at the end.
    print('Looping through all images in folder {}\n'
          'CRL+C to skip image'.format(folder_path))
    try:
        for candidate in os.listdir(folder_path):
            if not candidate.endswith(icon_extension):
                print('Not using this file, might be not an image: {}'.format(candidate))
                continue
            print('Drawing image: {}'.format(folder_path + candidate))
            draw_animation(Image.open(folder_path + candidate))
    except KeyboardInterrupt:
        unicorn.off()
    unicorn.off()
Example 7
Project: unicorn-hat-hd   Author: pimoroni   File: weather-icons.py    MIT License 6 votes vote down vote up
def weather_icons():
    # Dispatch on argv[1]: 'loop' animates every icon, a known filename
    # draws that single icon, and anything else shows usage.  A missing
    # argument raises IndexError, which also routes to the usage text.
    try:
        choice = argv[1]
        if choice == 'loop':
            loop()
            return
        if choice in os.listdir(folder_path):
            print('Drawing Image: {}'.format(choice))
            draw_animation(Image.open(folder_path + choice))
            unicorn.off()
            return
        help()
    except IndexError:
        help()
Example 8
Project: Gurux.DLMS.Python   Author: Gurux   File: GXManufacturerCollection.py    GNU General Public License v2.0 6 votes vote down vote up
def readManufacturerSettings(cls, manufacturers, path):
        # pylint: disable=broad-except
        """Populate `manufacturers` from the ``.obx`` files found in `path`.

        :param manufacturers: list to fill.  It is cleared and refilled in
            place so the caller's reference observes the result; the
            previous code rebound the name to a brand-new list, so every
            parsed manufacturer was silently discarded.
        :param path: directory to scan for serialized manufacturer files.
        :return: the populated `manufacturers` list (also mutated in place).
        """
        del manufacturers[:]  # in-place clear, visible to the caller
        files = [f for f in listdir(path) if isfile(join(path, f))]
        if files:
            for it in files:
                if it.endswith(".obx"):
                    try:
                        manufacturers.append(cls.__parse(os.path.join(path, it)))
                    except Exception as e:
                        # Best-effort: a corrupt file is reported and skipped.
                        print(e)
                        continue
        return manufacturers
    #
    # Serialize manufacturer from the xml.
    #
    # @param in
    #            Input stream.
    # Serialized manufacturer.
    # 
Example 9
Project: fs_image   Author: facebookincubator   File: temp_snapshot.py    MIT License 6 votes vote down vote up
def _make_test_yum_dnf_conf(
    yum_dnf: str, repos_path: Path, gpg_key_path: Path,
) -> str:
    # Emit a [main] section followed by one section per repo directory
    # under `repos_path`, skipping the config files themselves.
    main_section = textwrap.dedent(f'''\
        [main]
        cachedir=/var/cache/{yum_dnf}
        debuglevel=2
        keepcache=1
        logfile=/var/log/{yum_dnf}.log
        pkgpolicy=newest
        showdupesfromrepos=1
    ''')
    repo_sections = []
    for repo in os.listdir(repos_path.decode()):
        if repo in ('dnf.conf', 'yum.conf'):
            continue
        repo_sections.append(textwrap.dedent(f'''\
            [{repo}]
            baseurl={(repos_path / repo).file_url()}
            enabled=1
            name={repo}
            gpgkey={gpg_key_path.file_url()}
        '''))
    return main_section + '\n\n'.join(repo_sections)
Example 10
Project: fs_image   Author: facebookincubator   File: test_subvolume_garbage_collector.py    MIT License 6 votes vote down vote up
def test_garbage_collect_and_make_new_subvolume(self):
        # End-to-end: GC prunes stale state AND creates the requested new
        # subvolume wrapper (its JSON output plus a refcount entry).
        with self._gc_test_case() as n, \
             tempfile.TemporaryDirectory() as json_dir:
            argv = [
                '--refcounts-dir', n.refs_dir,
                '--subvolumes-dir', n.subs_dir,
                '--new-subvolume-wrapper-dir', 'new:subvol',
                '--new-subvolume-json', os.path.join(json_dir, 'OUT'),
            ]
            sgc.subvolume_garbage_collector(argv)
            self.assertEqual(['OUT'], os.listdir(json_dir))
            expected_refs = n.kept_refs | {'new:subvol.json'}
            self.assertEqual(expected_refs, set(os.listdir(n.refs_dir)))
            expected_subs = n.kept_subs | {'new:subvol'}
            self.assertEqual(expected_subs, set(os.listdir(n.subs_dir)))
Example 11
Project: fs_image   Author: facebookincubator   File: test_fs_utils.py    MIT License 6 votes vote down vote up
def test_path_decode(self):
        # A filename that is invalid UTF-8 must decode through Path exactly
        # as Python itself surrogate-escapes names read from the filesystem.
        with tempfile.TemporaryDirectory() as td:
            undecodable = Path(td) / _BAD_UTF
            self.assertTrue(undecodable.endswith(b'/' + _BAD_UTF))
            with open(undecodable, 'w'):
                pass
            proc = subprocess.run([
                sys.executable, '-c', f'import os;print(os.listdir({repr(td)}))'
            ], stdout=subprocess.PIPE)
            # Path's handling of invalid UTF-8 matches the default for
            # Python3 when it gets such data from the filesystem.  Both
            # sides evaluate to surrogate-escaped ['\udcc3('] + a newline.
            self.assertEqual(
                repr([undecodable.basename().decode()]) + '\n',
                proc.stdout.decode(),
            )
Example 12
Project: leapp-repository   Author: oamg   File: test_lib_backup.py    Apache License 2.0 6 votes vote down vote up
def test_backup_file():
    # backup_file() must create `<name>.leapp-backup` next to the source
    # with 0600 permissions, copy the content, and leave the source alone.
    workdir = tempfile.mkdtemp()
    try:
        source_path = os.path.join(workdir, 'foo-bar')
        content = 'test content\n'
        with open(source_path, 'w') as f:
            f.write(content)

        backup_path = backup_file(source_path)

        assert os.path.basename(backup_path) == 'foo-bar.leapp-backup'
        assert os.path.dirname(backup_path) == workdir
        assert len(os.listdir(workdir)) == 2
        mode = stat.S_IMODE(os.stat(backup_path).st_mode)
        assert mode == (stat.S_IRUSR | stat.S_IWUSR)
        # Both the backup and the untouched original hold the same content.
        for checked in (backup_path, source_path):
            with open(checked, 'r') as f:
                assert f.read() == content
    finally:
        shutil.rmtree(workdir)
Example 13
Project: leapp-repository   Author: oamg   File: test_lib_backup.py    Apache License 2.0 6 votes vote down vote up
def test_backup_file_target_exists():
    # When `<name>.leapp-backup` is already taken, backup_file() must fall
    # back to a suffixed name and leave the pre-existing file untouched.
    workdir = tempfile.mkdtemp()
    try:
        source_path = os.path.join(workdir, 'foo-bar')
        taken_path = '%s.leapp-backup' % source_path
        taken_content = 'do not overwrite me'
        content = 'test_content\n'
        with open(source_path, 'w') as f:
            f.write(content)
        with open(taken_path, 'w') as f:
            f.write(taken_content)

        backup_path = backup_file(source_path)

        assert os.path.basename(backup_path).startswith('foo-bar.leapp-backup.')
        assert os.path.dirname(backup_path) == workdir
        assert len(os.listdir(workdir)) == 3
        mode = stat.S_IMODE(os.stat(backup_path).st_mode)
        assert mode == (stat.S_IRUSR | stat.S_IWUSR)
        with open(backup_path, 'r') as f:
            assert f.read() == content
        with open(taken_path, 'r') as f:
            assert f.read() == taken_content
    finally:
        shutil.rmtree(workdir)
Example 14
Project: leapp-repository   Author: oamg   File: test_converter.py    Apache License 2.0 6 votes vote down vote up
def test_match(self):
        # For every recorded ntp conf fixture, the generated chrony conf
        # (and keys, when both exist) must match the recorded output.
        conf_names = [fe for fe in os.listdir(NTP_MATCH_DIR) if fe.endswith('conf')]
        for conf_name in conf_names:
            # Fixture files share a numeric prefix that ties them together.
            num = conf_name.split('.')[0].split('_')[0]
            ntp_conf = os.path.join(NTP_MATCH_DIR, conf_name)
            step_tickers = self._check_existance(
                    os.path.join(NTP_MATCH_DIR, '%s_step_tickers' % num))
            config = ntp2chrony.NtpConfiguration('',
                                                 ntp_conf,
                                                 step_tickers=step_tickers)
            potential_chrony_keys = os.path.join(CHRONY_MATCH_DIR, "%s_chrony.keys" % num)
            actual_data = config.get_chrony_conf(chrony_keys_path=potential_chrony_keys)
            expected_fname = os.path.join(CHRONY_MATCH_DIR, "%s_chrony.conf" % num)
            # Recorded and generated configs must agree.
            self._do_match(expected_fname, actual_data)
            actual_keys = config.get_chrony_keys()
            expected_keys_file = self._check_existance(potential_chrony_keys)
            # If keys are both recorded and generated, they must agree too.
            if actual_keys and expected_keys_file != '':
                self._do_match(expected_keys_file, actual_keys)
Example 15
Project: leapp-repository   Author: oamg   File: library.py    Apache License 2.0 6 votes vote down vote up
def _get_parsed_configs(read_func=utils.read_file, listdir=os.listdir):
    # Collect (path, parsed_config) for every readable, parseable `.conf`
    # file in the vsftpd config directory.  A missing directory is normal
    # (empty result); any other OSError is logged as a warning.
    res = []
    try:
        for fname in listdir(utils.VSFTPD_CONFIG_DIR):
            path = os.path.join(utils.VSFTPD_CONFIG_DIR, fname)
            if not path.endswith('.conf'):
                continue
            content = utils.get_config_contents(path, read_func=read_func)
            if content is None:
                continue
            parsed = _parse_config(path, content)
            if parsed is None:
                continue
            res.append((path, parsed))
    except OSError as e:
        if e.errno != errno.ENOENT:
            api.current_logger().warning('Failed to read vsftpd configuration directory: %s'
                                         % e)
    return res
Example 16
Project: leapp-repository   Author: oamg   File: rhsm.py    Apache License 2.0 6 votes vote down vote up
def get_existing_product_certificates(context, rhsm_info):
    """
    Retrieves information about existing product certificates on the system.

    :param context: An instance of a mounting.IsolatedActions class
    :type context: mounting.IsolatedActions class
    :param rhsm_info: An instance of a RHSMInfo derived model.
    :type rhsm_info: RHSMInfo derived model
    """
    # Only populate the model once.
    if rhsm_info.existing_product_certificates:
        return
    for path in ('/etc/pki/product', '/etc/pki/product-default'):
        cert_dir = context.full_path(path)
        if not os.path.isdir(cert_dir):
            continue
        certs = []
        for entry in os.listdir(cert_dir):
            if os.path.isfile(os.path.join(cert_dir, entry)):
                certs.append(os.path.join(path, entry))
        if certs:
            rhsm_info.existing_product_certificates.extend(certs)
Example 17
Project: godot-mono-builds   Author: godotengine   File: android.py    MIT License 6 votes vote down vote up
def android_autodetect_cmake(opts: AndroidOpts) -> str:
    # Pick the newest CMake version directory shipped under the SDK's
    # `cmake/` directory; non-version folder names are skipped.
    from distutils.version import LooseVersion
    from os import listdir

    sdk_cmake_basedir = path_join(opts.android_sdk_root, 'cmake')

    detected = []
    for entry in listdir(sdk_cmake_basedir):
        if not os.path.isdir(path_join(sdk_cmake_basedir, entry)):
            continue
        try:
            detected.append(LooseVersion(entry))
        except ValueError:
            continue # Not a version folder

    if not detected:
        raise BuildError('Cannot auto-detect Android CMake version')

    latest_version = str(max(detected))
    print('Auto-detected Android CMake version: ' + latest_version)

    return latest_version
Example 18
Project: alfred-yubikey-otp   Author: robertoriv   File: workflow.py    MIT License 6 votes vote down vote up
def _delete_directory_contents(self, dirpath, filter_func):
        """Delete all files in a directory.

        :param dirpath: path to directory to clear
        :type dirpath: ``unicode`` or ``str``
        :param filter_func function to determine whether a file shall be
            deleted or not.
        :type filter_func ``callable``

        """
        if os.path.exists(dirpath):
            for filename in os.listdir(dirpath):
                if not filter_func(filename):
                    continue
                path = os.path.join(dirpath, filename)
                if os.path.isdir(path):
                    shutil.rmtree(path)
                else:
                    os.unlink(path)
                self.logger.debug('deleted : %r', path) 
Example 19
Project: pyblish-win   Author: pyblish   File: webchecker.py    GNU Lesser General Public License v3.0 6 votes vote down vote up
def open_file(self, url):
        # Serve a local file:// URL (Python 2 code).  For a directory:
        # redirect to index.html when one exists, otherwise synthesize an
        # HTML listing (via os.listdir) wrapped in a MyStringIO
        # pseudo-response.  The non-directory branch lies outside this
        # excerpt.
        path = urllib.url2pathname(urllib.unquote(url))
        if os.path.isdir(path):
            if path[-1] != os.sep:
                url = url + '/'
            indexpath = os.path.join(path, "index.html")
            if os.path.exists(indexpath):
                return self.open_file(url + "index.html")
            try:
                names = os.listdir(path)
            except os.error, msg:
                # Re-raise unlistable directories as IOError, preserving
                # the original traceback (Python 2 three-argument raise).
                exc_type, exc_value, exc_tb = sys.exc_info()
                raise IOError, msg, exc_tb
            names.sort()
            s = MyStringIO("file:"+url, {'content-type': 'text/html'})
            s.write('<BASE HREF="file:%s">\n' %
                    urllib.quote(os.path.join(path, "")))
            for name in names:
                q = urllib.quote(name)
                s.write('<A HREF="%s">%s</A>\n' % (q, q))
            s.seek(0)
            return s
Example 20
Project: pyblish-win   Author: pyblish   File: PyncheWidget.py    GNU Lesser General Public License v3.0 6 votes vote down vote up
def make_view_popups(switchboard, root, extrapath):
    # Discover pluggable "*Viewer.py" modules next to this file (and on
    # `extrapath`), wrapping each ADDTOVIEW-enabled one in a PopupViewer.
    viewers = []
    # where we are in the file system
    search_dirs = [os.path.dirname(__file__)] + extrapath
    for directory in search_dirs:
        if directory == '':
            directory = '.'
        for filename in os.listdir(directory):
            if not filename.endswith('Viewer.py'):
                continue
            name = filename[:-3]
            try:
                module = __import__(name)
            except ImportError:
                # Pynche is running from inside a package, so get the
                # module using the explicit path.
                pkg = __import__('pynche.'+name)
                module = getattr(pkg, name)
            if hasattr(module, 'ADDTOVIEW') and module.ADDTOVIEW:
                # this is an external viewer
                viewers.append(PopupViewer(module, name, switchboard, root))
    # sort alphabetically
    viewers.sort()
    return viewers
Example 21
Project: pyblish-win   Author: pyblish   File: ftpmirror.py    GNU Lesser General Public License v3.0 6 votes vote down vote up
def remove(fullname):
    # Recursively remove a local directory tree (Python 2 code).  Returns
    # 0 when anything could not be removed; unlistable directories are
    # treated as empty.  NOTE(review): this excerpt is truncated — the
    # non-directory branch and the success-path `return` are not visible
    # here, so the function appears to fall through for files.
    if os.path.isdir(fullname) and not os.path.islink(fullname):
        try:
            names = os.listdir(fullname)
        except os.error:
            names = []
        ok = 1
        for name in names:
            # One failed child removal poisons the whole directory.
            if not remove(os.path.join(fullname, name)):
                ok = 0
        if not ok:
            return 0
        try:
            os.rmdir(fullname)
        except os.error, msg:
            print "Can't remove local directory %r: %s" % (fullname, msg)
            return 0
Example 22
Project: pyblish-win   Author: pyblish   File: byext.py    GNU Lesser General Public License v3.0 6 votes vote down vote up
def statdir(self, dir):
        # Tally one directory: count it, then recurse into subdirectories
        # and stat regular files, skipping CVS temp and Emacs backup files.
        self.addstats("<dir>", "dirs", 1)
        try:
            entries = sorted(os.listdir(dir))
        except os.error as err:
            sys.stderr.write("Can't list %s: %s\n" % (dir, err))
            self.addstats("<dir>", "unlistable", 1)
            return
        for entry in entries:
            if entry.startswith(".#"):
                continue # Skip CVS temp files
            if entry.endswith("~"):
                continue# Skip Emacs backup files
            full = os.path.join(dir, entry)
            if os.path.islink(full):
                self.addstats("<lnk>", "links", 1)
            elif os.path.isdir(full):
                self.statdir(full)
            else:
                self.statfile(full)
Example 23
Project: pyblish-win   Author: pyblish   File: cleanfuture.py    GNU Lesser General Public License v3.0 6 votes vote down vote up
def check(file):
    # Recursively clean-future-check `file` (Python 2 code).  Directories
    # are walked; subdirectories are descended into only when the module
    # global `recurse` is set, while `.py` files are always checked.
    # Relies on module globals `verbose` and `recurse`, and on `errprint`.
    if os.path.isdir(file) and not os.path.islink(file):
        if verbose:
            print "listing directory", file
        names = os.listdir(file)
        for name in names:
            fullname = os.path.join(file, name)
            if ((recurse and os.path.isdir(fullname) and
                 not os.path.islink(fullname))
                or name.lower().endswith(".py")):
                check(fullname)
        return

    if verbose:
        print "checking", file, "...",
    try:
        f = open(file)
    except IOError, msg:
        # Unreadable files are reported and skipped, not fatal.
        errprint("%r: I/O Error: %s" % (file, str(msg)))
        return
Example 24
Project: pyblish-win   Author: pyblish   File: checkappend.py    GNU Lesser General Public License v3.0 6 votes vote down vote up
def check(file):
    # Recursively scan `file` for `.append` misuse (Python 2 code).
    # Directories are always descended into (no `recurse` flag here,
    # unlike the sibling tools); `.py` files are checked.  Relies on the
    # module globals `verbose` and `errprint`.
    if os.path.isdir(file) and not os.path.islink(file):
        if verbose:
            print "%r: listing directory" % (file,)
        names = os.listdir(file)
        for name in names:
            fullname = os.path.join(file, name)
            if ((os.path.isdir(fullname) and
                 not os.path.islink(fullname))
                or os.path.normcase(name[-3:]) == ".py"):
                check(fullname)
        return

    try:
        f = open(file)
    except IOError, msg:
        # Unreadable files are reported and skipped, not fatal.
        errprint("%r: I/O Error: %s" % (file, msg))
        return
Example 25
Project: pyblish-win   Author: pyblish   File: test_discovery.py    GNU Lesser General Public License v3.0 6 votes vote down vote up
def test_discover_with_modules_that_fail_to_import(self):
        # Discovery must still produce a test case for a module that fails
        # to import; running that case should re-raise the ImportError.
        loader = unittest.TestLoader()

        real_listdir = os.listdir
        real_isfile = os.path.isfile
        orig_sys_path = sys.path[:]
        # Fake a directory holding exactly one (unimportable) test module.
        os.listdir = lambda _: ['test_this_does_not_exist.py']
        os.path.isfile = lambda _: True
        def restore():
            os.path.isfile = real_isfile
            os.listdir = real_listdir
            sys.path[:] = orig_sys_path
        self.addCleanup(restore)

        suite = loader.discover('.')
        self.assertIn(os.getcwd(), sys.path)
        self.assertEqual(suite.countTestCases(), 1)
        test = list(list(suite)[0])[0] # extract test from suite

        with self.assertRaises(ImportError):
            test.test_this_does_not_exist()
Example 26
Project: pyblish-win   Author: pyblish   File: test_main.py    GNU Lesser General Public License v3.0 6 votes vote down vote up
def test_filename_changing_on_output_two_files(self):
        """2to3 two files in one directory with a new output dir."""
        self.setup_test_source_trees()
        err = StringIO.StringIO()
        py2_files = [self.trivial_py2_file, self.init_py2_file]
        expected_files = set(map(os.path.basename, py2_files))
        argv = ["-n", "-w", "--write-unchanged-files",
                "--no-diffs", "--output-dir", self.py3_dest_dir] + py2_files
        ret = self.run_2to3_capture(
                argv, StringIO.StringIO(""), StringIO.StringIO(), err)
        self.assertEqual(ret, 0)
        stderr = err.getvalue()
        # The tool must announce the mirrored layout...
        self.assertIn(
                "Output in %r will mirror the input directory %r layout" % (
                        self.py3_dest_dir, self.py2_src_dir), stderr)
        # ...and the output dir must contain exactly the converted files.
        self.assertEqual(expected_files, set(os.listdir(self.py3_dest_dir)))
Example 27
Project: fs_image   Author: facebookincubator   File: update_package_db.py    MIT License 5 votes vote down vote up
def _read_json_dir_db(path: Path) -> PackageTagDb:
    '''
    Reconstruct the package->tag->info DB from an on-disk layout of one
    directory per package, each containing one `<tag>.json` file per tag
    (a generated header followed by the tag's JSON payload).

    NOTE(review): `path` appears to be the project's bytes-based `Path`
    (listdir entries get `.decode()`d) — confirm against `fs_utils.Path`.
    '''
    db = {}
    for package in os.listdir(path):
        tag_to_info = db.setdefault(package.decode(), {})
        for tag_json in os.listdir(path / package):
            tag_json = tag_json.decode()
            assert tag_json.endswith(_JSON), (path, package, tag_json)
            with open(path / package / tag_json) as infile:
                # Presumably consumes the generated header so the stream
                # is positioned at the JSON payload — see its definition.
                _read_generated_header(infile)
                tag_to_info[tag_json[:-len(_JSON)]] = json.load(infile)
    return db
Example 28
Project: fs_image   Author: facebookincubator   File: temp_snapshot.py    MIT License 5 votes vote down vote up
def make_temp_snapshot(
    repos, out_dir, gpg_key_path, gpg_key_whitelist_dir,
) -> Path:
    'Generates temporary RPM repo snapshots for tests to use as inputs.'
    # Snapshot into a scratch subdirectory first; its contents are merged
    # up into `out_dir` at the end so the result resembles a prod snapshot.
    snapshot_dir = out_dir / 'temp_snapshot_dir'
    os.mkdir(snapshot_dir)

    with temp_repos_steps(repo_change_steps=[repos]) as repos_root:
        snapshot_repos(
            dest=snapshot_dir,
            # Snapshot the 0th step only, since only that is defined
            yum_conf_content=_make_test_yum_dnf_conf(
                'yum', repos_root / '0', gpg_key_path,
            ),
            dnf_conf_content=_make_test_yum_dnf_conf(
                'dnf', repos_root / '0', gpg_key_path,
            ),
            repo_db_ctx=RepoDBContext(
                DBConnectionContext.make(
                    # The SQLite DB lands directly in `out_dir`.
                    kind='sqlite', db_path=(out_dir / 'db.sqlite3').decode(),
                ),
                SQLDialect.SQLITE3,
            ),
            storage=Storage.make(
                key='test',
                kind='filesystem',
                base_dir=(out_dir / 'storage').decode(),
            ),
            rpm_shard=RpmShard(shard=0, modulo=1),
            # NOTE(review): the `gpg_key_whitelist_dir` parameter is ignored
            # here — the module-level `no_gpg_keys_yet` is passed instead.
            # Looks deliberate, but worth confirming.
            gpg_key_whitelist_dir=no_gpg_keys_yet,
            retries=0,  # Nothing here should require retries, it's a bug.
        )

    # Merge the repo snapshot with the storage & RPM DB -- this makes our
    # test snapshot build target look very much like prod snapshots.
    for f in os.listdir(snapshot_dir):
        assert not os.path.exists(out_dir / f), f'Must not overwrite {f}'
        os.rename(snapshot_dir / f, out_dir / f)
    os.rmdir(snapshot_dir)
Example 29
Project: fs_image   Author: facebookincubator   File: test_parse_repodata.py    MIT License 5 votes vote down vote up
def _listdir(path: Path) -> Set[Path]:
    return {path / p for p in os.listdir(path)} 
Example 30
Project: fs_image   Author: facebookincubator   File: test_gpg_keys.py    MIT License 5 votes vote down vote up
def test_snapshot_gpg_keys(self):
        # Snapshotting a key must fail until an identical copy of it is
        # present in the whitelist dir; then it lands under gpg_keys/.
        with temp_dir() as td:
            hello_path = td / 'hello'
            with open(hello_path, 'w') as out_f:
                out_f.write('world')

            whitelist_dir = td / 'whitelist'
            os.mkdir(whitelist_dir)

            def try_snapshot(snapshot_dir):
                snapshot_gpg_keys(
                    key_urls=[hello_path.file_url()],
                    whitelist_dir=whitelist_dir,
                    snapshot_dir=snapshot_dir,
                )

            # Not whitelisted at all -> the snapshot must fail.
            with temp_dir() as snap_dir, self.assertRaises(FileNotFoundError):
                try_snapshot(snap_dir)
            # Whitelisted under the right name but with differing content
            # -> still a failure.
            with open(whitelist_dir / 'hello', 'w') as out_f:
                out_f.write('wrong contents')
            with temp_dir() as snap_dir, self.assertRaises(AssertionError):
                try_snapshot(snap_dir)
            # Correctly whitelisted -> snapshot succeeds.
            shutil.copy(hello_path, whitelist_dir)

            with temp_dir() as snapshot_dir:
                try_snapshot(snapshot_dir)
                self.assertEqual([b'gpg_keys'], os.listdir(snapshot_dir))
                self.assertEqual(
                    [b'hello'], os.listdir(snapshot_dir / 'gpg_keys'),
                )
                with open(snapshot_dir / 'gpg_keys/hello') as in_f:
                    self.assertEqual('world', in_f.read())
Example 31
Project: fs_image   Author: facebookincubator   File: test_filesystem_storage.py    MIT License 5 votes vote down vote up
def test_uncommitted(self):
        # Storage must stay empty unless a writer commits: neither an
        # unfinished write nor one aborted by an exception may persist.
        with self._temp_storage() as storage:
            def assert_storage_empty():
                self.assertEqual([], os.listdir(storage.base_dir))

            assert_storage_empty()
            with storage.writer() as writer:
                writer.write(b'foo')
            assert_storage_empty()
            with self.assertRaisesRegex(RuntimeError, '^abracadabra$'):
                with storage.writer() as writer:
                    raise RuntimeError('abracadabra')
            assert_storage_empty()
Example 32
Project: fs_image   Author: facebookincubator   File: test_subvolume_garbage_collector.py    MIT License 5 votes vote down vote up
def test_garbage_collect_subvolumes(self):
        # Both entry points (library call and the CLI-style `_gc_only`)
        # must leave exactly the referenced refcounts and subvols behind.
        gc_callers = [
            lambda n: sgc.garbage_collect_subvolumes(n.refs_dir, n.subs_dir),
            self._gc_only,
        ]
        for gc in gc_callers:
            with self._gc_test_case() as n:
                gc(n)
                self.assertEqual(n.kept_refs, set(os.listdir(n.refs_dir)))
                self.assertEqual(n.kept_subs, set(os.listdir(n.subs_dir)))
Example 33
Project: fs_image   Author: facebookincubator   File: test_subvolume_garbage_collector.py    MIT License 5 votes vote down vote up
def test_no_gc_due_to_lock(self):
        # While another process holds the subvolumes-dir lock, GC must be
        # a no-op; and creating a new subvolume whose refcount already
        # exists must fail.
        with self._gc_test_case() as n:
            fd = os.open(n.subs_dir, os.O_RDONLY)
            try:
                fcntl.flock(fd, fcntl.LOCK_SH | fcntl.LOCK_NB)
                self._gc_only(n)

                # Sneak in a test that new subvolume creation fails when
                # its refcount already exists.
                with tempfile.TemporaryDirectory() as json_dir, \
                     self.assertRaisesRegex(
                         RuntimeError, 'Refcount already exists:',
                     ):
                    argv = [
                        '--refcounts-dir', n.refs_dir,
                        '--subvolumes-dir', n.subs_dir,
                        # This refcount was created by `_gc_test_case`.
                        '--new-subvolume-wrapper-dir', '3link:1',
                        '--new-subvolume-json', os.path.join(json_dir, 'OUT'),
                    ]
                    sgc.subvolume_garbage_collector(argv)
            finally:
                os.close(fd)

            # Nothing was collected: even the would-be-GC'd sets survive.
            self.assertEqual(
                n.kept_refs | n.gcd_refs, set(os.listdir(n.refs_dir))
            )
            self.assertEqual(
                n.kept_subs | n.gcd_subs, set(os.listdir(n.subs_dir))
            )
Example 34
Project: fs_image   Author: facebookincubator   File: test_fs_utils.py    MIT License 5 votes vote down vote up
def _check_has_one_file(self, dir_path, filename, contents):
        # `dir_path` must contain exactly `filename` (bytes: listing a
        # bytes path yields bytes names), holding exactly `contents`.
        self.assertEqual([filename.encode()], os.listdir(dir_path))
        target = dir_path / filename
        with open(target) as in_f:
            self.assertEqual(contents, in_f.read())
Example 35
Project: fs_image   Author: facebookincubator   File: procfs_serde.py    MIT License 5 votes vote down vote up
def deserialize_untyped(subvol, path_with_ext: str) -> Any:
    '''
    Read a procfs-style tree at `path_with_ext` inside `subvol` back into
    Python data.

    Directories deserialize to dicts (one key per entry).  Files
    deserialize by extension: `.bin` -> raw bytes; `.image_path` /
    `.host_path` -> bytes with the trailing newline stripped; no
    extension -> str with the trailing newline stripped.

    :raises AssertionError: for an unsupported extension, a missing
        trailing newline, or a path that is neither file nor directory.
    '''
    # NB: while `isdir` and `isfile` do follow symbolic links, `subvol.path`
    # will prevent the use of symlinks that take us outside the subvol.
    if os.path.isdir(subvol.path(path_with_ext)):
        return {
            k: deserialize_untyped(subvol, os.path.join(path_with_ext, k))
                for k in os.listdir(subvol.path(path_with_ext).decode())
        }
    elif os.path.isfile(subvol.path(path_with_ext)):
        with open(subvol.path(path_with_ext), 'rb') as f:
            s = f.read()

        _, ext = os.path.splitext(path_with_ext)
        if ext == '.bin':
            return s

        # All other extensions had a trailing newline appended.
        if not s.endswith(b'\n'):
            raise AssertionError(
                f'{path_with_ext} must have had a trailing newline, got {s}'
            )
        s = s[:-1]

        if ext == '.image_path' or ext == '.host_path':
            return s
        elif ext == '':
            return s.decode()
        else:
            # BUG FIX: the f-string was missing braces, so every such error
            # literally read "Unsupported extension (path_with_ext)".
            raise AssertionError(f'Unsupported extension ({path_with_ext})')
    else:
        raise AssertionError(f'{path_with_ext} is neither a file nor a dir')
Example 36
Project: leapp-repository   Author: oamg   File: actor.py    Apache License 2.0 5 votes vote down vote up
def process(self):
        """Collect all bundled *.rpm files and produce RpmTransactionTasks."""
        folder = self.get_folder_path('bundled-rpms')
        # Use realpath: symlinks cannot be resolved inside the target
        # userspace because they use the /installroot mount target.
        rpms = [
            os.path.realpath(os.path.join(folder, fname))
            for fname in os.listdir(folder)
            if fname.endswith('.rpm')
        ]
        if rpms:
            self.produce(RpmTransactionTasks(local_rpms=rpms))
Example 37
Project: leapp-repository   Author: oamg   File: library.py    Apache License 2.0 5 votes vote down vote up
def get_vsftpd_facts(read_func=utils.read_file, listdir=os.listdir):
    """Return VsftpdFacts summarizing the default config hash and the
    relevant options of each parsed vsftpd config file.

    `read_func` / `listdir` are injectable for testing.
    """
    config_hash = utils.get_default_config_hash(read_func=read_func)
    configs = _get_parsed_configs(read_func=read_func, listdir=listdir)
    # Build the result list in one pass instead of a manual append loop.
    res_configs = [
        VsftpdConfig(path=path,
                     strict_ssl_read_eof=config.get(utils.STRICT_SSL_READ_EOF),
                     tcp_wrappers=config.get(utils.TCP_WRAPPERS))
        for path, config in configs
    ]
    return VsftpdFacts(default_config_hash=config_hash, configs=res_configs)
Example 38
Project: leapp-repository   Author: oamg   File: modscan.py    Apache License 2.0 5 votes vote down vote up
def _create_dracut_modules():
    """Yield an UpgradeDracutModule for every entry in the actor's
    'dracut' folder (yields nothing when the folder is absent)."""
    base = api.get_actor_folder_path('dracut')
    if not base:
        return
    base = os.path.abspath(base)
    for entry in os.listdir(base):
        yield UpgradeDracutModule(
            # Strip the numeric ordering prefix from the directory name.
            name=re.sub(r'^\d+', '', entry),
            module_path=os.path.join(base, entry),
        )
Example 39
Project: leapp-repository   Author: oamg   File: actor.py    Apache License 2.0 5 votes vote down vote up
def process(self):
        """Gather firewalld configuration facts from /etc/firewalld and
        produce a FirewalldFacts message. Missing files are tolerated."""
        facts = FirewalldFacts()

        try:
            whitelist_root = ElementTree.parse(
                '/etc/firewalld/lockdown-whitelist.xml').getroot()
            facts.firewall_config_command = (
                private.getLockdownFirewallConfigCommand(whitelist_root))
        except IOError:
            pass  # file absent/unreadable: keep the default value

        try:
            direct_root = ElementTree.parse(
                '/etc/firewalld/direct.xml').getroot()
            facts.ebtablesTablesInUse = (
                private.getEbtablesTablesInUse(direct_root))
        except IOError:
            pass

        ipset_types = set()
        ipset_dir = '/etc/firewalld/ipsets'
        try:
            for fname in os.listdir(ipset_dir):
                if not fname.endswith('.xml'):
                    continue
                try:
                    root = ElementTree.parse(
                        os.path.join(ipset_dir, fname)).getroot()
                    ipset_types |= set(private.getIpsetTypesInUse(root))
                except IOError:
                    pass
            # Only assigned when the directory itself was listable,
            # matching the original control flow.
            facts.ipsetTypesInUse = list(ipset_types)
        except OSError:
            pass

        self.produce(facts)
Example 40
Project: leapp-repository   Author: oamg   File: library.py    Apache License 2.0 5 votes vote down vote up
def check_files_for_compressed_ipv6():
    """Return the sendmail-related files that contain a compressed IPv6
    address (``IPv6:...::``) and are not known false positives."""
    conf_files = get_conf_files()
    # For every *.db map, check its plain-text source (name minus '.db').
    db_sources = [
        os.path.join(SendmailConfDir, re.sub(r'\.db$', '', name))
        for name in os.listdir(SendmailConfDir)
        if name.endswith('.db')
    ]
    pattern = re.compile(r'IPv6:[0-9a-fA-F:]*::')
    migrate_files = []
    for path in db_sources + conf_files:
        if not os.path.exists(path):
            continue
        with open(path) as fh:
            for line in fh:
                if pattern.search(line) and not check_false_positives(
                        os.path.basename(path), line):
                    migrate_files.append(path)
                    break
    return migrate_files
Example 41
Project: PEAKachu   Author: tbischler   File: replicons.py    ISC License 5 votes vote down vote up
def _check_annotations(self):
        """Store annotations from every file found in the .gff folder;
        print a notice when no folder or no files are available."""
        if self._gff_folder is None:
            print("No folder with .gff files specified")
            return
        gff_files = [
            join(self._gff_folder, entry)
            for entry in listdir(self._gff_folder)
            if isfile(join(self._gff_folder, entry))
        ]
        if not gff_files:
            print("No .gff file found in specified folder")
            return
        for gff_file in gff_files:
            self._store_annotations(gff_file)
Example 42
Project: PEAKachu   Author: tbischler   File: consensus_peak.py    ISC License 5 votes vote down vote up
def _store_peaks(self):
        """Read every peak table and record (start, end) intervals keyed by
        replicon and strand in self._replicon_peak_dict."""
        table_dir = "{}/peak_tables".format(self._project_folder)
        for entry in listdir(table_dir):
            table_path = join(table_dir, entry)
            if not isfile(table_path):
                continue
            peak_df = pd.read_table(table_path, sep='\t')
            for peak in peak_df.to_dict("records"):
                self._replicon_peak_dict[peak["replicon"]][
                    peak["peak_strand"]].add(
                        (peak["peak_start"], peak["peak_end"]))
Example 43
Project: PEAKachu   Author: tbischler   File: consensus_peak.py    ISC License 5 votes vote down vote up
def _get_peak_coverage(self):
        """Accumulate normalized wiggle coverage over replicon peaks into one
        consensus array per library, with both strands summed.

        Returns:
            dict mapping library name -> numpy array of length
            self._consensus_length.
        """
        norm_coverage_folder = "{}/normalized_coverage".format(
            self._project_folder)
        # Only plain files in the folder are treated as coverage input.
        coverage_files = [join(norm_coverage_folder, f) for f in listdir(
            norm_coverage_folder) if isfile(join(norm_coverage_folder, f))]
        wiggle_parser = WiggleParser()
        cons_value_dict = defaultdict(dict)
        for coverage_file in coverage_files:
            cons_values = np.zeros(self._consensus_length)
            with open(coverage_file, 'r') as cov_fh:
                for wiggle_entry in wiggle_parser.entries(cov_fh):
                    # Track names end in "_forward" / "_reverse"; everything
                    # before the last underscore is the library name.
                    lib_name_and_strand = wiggle_entry.track_name
                    lib_name = '_'.join(lib_name_and_strand.split('_')[:-1])
                    lib_strand = '+' if lib_name_and_strand.split(
                        '_')[-1] == "forward" else '-'
                    replicon = wiggle_entry.replicon
                    pos_value_pairs = dict(wiggle_entry.pos_value_pairs)
                    self._get_coverage_for_replicon_peaks(
                        replicon, lib_strand, pos_value_pairs, cons_values)
            # NOTE(review): this stores under the lib_name/lib_strand of the
            # LAST track seen in the file, and raises NameError on a file
            # with no tracks — it assumes each coverage file holds exactly
            # one library/strand; confirm against the writer of these files.
            cons_value_dict[lib_name][lib_strand] = cons_values
        # combine strands
        comb_cons_value_dict = {}
        for lib in cons_value_dict:
            comb_cons_value_dict[lib] = np.zeros(self._consensus_length)
            for strand in cons_value_dict[lib]:
                comb_cons_value_dict[lib] += cons_value_dict[lib][strand]
        return comb_cons_value_dict
Example 44
Project: meta-transfer-learning   Author: erfaneshrati   File: miniimagenet.py    MIT License 5 votes vote down vote up
def _read_classes(dir_path):
    """
    Read the WNID directories in a directory.

    Returns a list of ImageNetClass objects, one per entry whose name
    starts with 'n' (the WNID naming convention).
    """
    entries = os.listdir(dir_path)
    return [ImageNetClass(os.path.join(dir_path, entry))
            for entry in entries if entry.startswith('n')]

# pylint: disable=R0903 
Example 45
Project: meta-transfer-learning   Author: erfaneshrati   File: miniimagenet.py    MIT License 5 votes vote down vote up
def sample(self, num_images):
        """
        Sample images (as numpy arrays) from the class.

        Returns:
          A sequence of 84x84x3 numpy arrays.
          Each pixel ranges from 0 to 1.
        """
        names = [f for f in os.listdir(self.dir_path) if f.endswith('.JPEG')]
        # Shuffle, then take the first num_images — a uniform random sample.
        random.shuffle(names)
        return [self._read_image(name) for name in names[:num_images]]
Example 46
Project: Wide-Residual-Nets-for-SETI   Author: sgrvinod   File: average_scores.py    Apache License 2.0 5 votes vote down vote up
def average_scores(input_folder, output_path):
    """
    Averages scores of several CSV files generated by test.py

    Args:
        input_folder (path): folder with models' scores' CSVs in it.
        output_path (path): path of output CSV file with averaged scores, ready for submission to SETI scoreboards
    """
    # NOTE(review): Python 2 code (print statements). Also assumes at least
    # one .csv file exists — with an empty folder, `index` below is unbound.
    csv_files = [f for f in os.listdir(input_folder) if f.endswith('.csv')]
    model_scores = []
    for i, csv in enumerate(csv_files):
        df = pd.read_csv(os.path.join(input_folder, csv), index_col=0, header=None)
        if i == 0:
            index = df.index  # first file defines the canonical row order
        else:
            assert index.equals(df.index), "Indices of one or more files do not match!"
        model_scores.append(df)
    print "Read %d files. Averaging..." % len(model_scores)

    # groupby(level=0).mean() collapses the concatenated frames back to one
    # averaged row per index label.
    concat_scores = pd.concat(model_scores)
    averaged_scores = concat_scores.groupby(level=0).mean()
    assert averaged_scores.shape[0] == len(list(index)), "Something went wrong when concatenating/averaging!"
    averaged_scores = averaged_scores.reindex(index)  # restore original order

    averaged_scores.to_csv(output_path, header=False, index=True)
    print "Averaged scores saved to %s" % output_path
Example 47
Project: gog-galaxy-plugin-downloader   Author: Slashbunny   File: download.py    GNU General Public License v3.0 5 votes vote down vote up
def fix_plugin_directories(dest):
    """
    Loops through all folders in the output directory, reads the their manifest
    file, and renames the directory to the standard <platform>_<guid> format
    """
    # Loop through directories in the destination directory
    for existing_dir in os.listdir(dest):
        existing_path = os.path.join(dest, existing_dir)

        # Skip non-directories
        if not os.path.isdir(existing_path):
            continue

        try:
            with open(os.path.join(existing_path, 'manifest.json')) as m:
                data = json.load(m)
                platform = data['platform']
                guid = data['guid']

                # Close json file
                m.close()

                expected_dir = platform + '_' + guid
                expected_path = os.path.join(dest, expected_dir)

                if existing_path != expected_path:
                    print('NOTICE: Folder should be "{}", but it is named "{}"'
                          .format(expected_dir, existing_dir))

                    if os.path.isdir(expected_path):
                        print('NOTICE: Correct pathed plugin already exists,'
                              + ' deleting extra plugin')
                        shutil.rmtree(existing_path)
                    else:
                        print('NOTICE: Renaming folder to proper name')
                        shutil.move(existing_path, expected_path)
        except (FileNotFoundError, json.decoder.JSONDecodeError, KeyError):
            print('ERROR: Could not read plugin data from {} folder'
                  .format(existing_path)) 
Example 48
Project: gog-galaxy-plugin-downloader   Author: Slashbunny   File: download.py    GNU General Public License v3.0 5 votes vote down vote up
def delete_old_plugins(data, dest):
    """
    Deletes versions of plugins that don't match the yaml manifest. In theory
    this should only be older versions, but any version that doesn't match
    the yaml definition will be deleted

    This explicitly does not touch other directories that do not match the
    known plugin names.

    If the version doesn't match the yaml definition, the directory is removed
    """
    # Loop over each plugin
    for name, data in data.items():
        expected_plugin_dir = name + '_' + data['guid']

        # Loop through directories in the destination directory
        for item in os.listdir(dest):
            full_path = os.path.join(dest, item)

            # Skip non-directories
            if not os.path.isdir(full_path):
                continue

            # Skip directory names that are in the valid plugin directory array
            if item == expected_plugin_dir:
                continue

            # If any other directory begins with <plugin_name>_, delete it
            if item.startswith(name + '_'):
                print('Deleting wrong version "{}" from "{}"'
                      .format(item, dest))
                shutil.rmtree(full_path) 
Example 49
Project: malcode   Author: moonsea   File: filtersamemd5.py    GNU General Public License v3.0 5 votes vote down vote up
def countFile(dirpath, suffix=''):
    """Count entries of `dirpath` whose final dot-separated part equals
    `suffix` (dot-less names are compared whole)."""
    return sum(1 for name in os.listdir(dirpath)
               if name.split('.')[-1] == suffix)
Example 50
Project: malcode   Author: moonsea   File: wingenasm.py    GNU General Public License v3.0 5 votes vote down vote up
def countFile(dirpath, suffix=''):
    """Count directory entries whose last '.'-separated component equals
    `suffix`. A name without a dot is compared as a whole."""
    matches = (name for name in os.listdir(dirpath)
               if name.split('.')[-1] == suffix)
    return sum(1 for _ in matches)
Example 51
Project: malcode   Author: moonsea   File: virusgenasm.py    GNU General Public License v3.0 5 votes vote down vote up
def countFile(dirpath, suffix=''):
    """Return how many entries in `dirpath` end in `.suffix` (compared on
    the text after the last dot; dot-less names compared whole)."""
    entries = os.listdir(dirpath)
    return len([name for name in entries if name.split('.')[-1] == suffix])
Example 52
Project: malcode   Author: moonsea   File: creatematrix.py    GNU General Public License v3.0 5 votes vote down vote up
def createFeaMatrix(feafile, tfpath):
    """Build the 2-gram training feature matrix.

    Reads the feature names (text before '----') from `feafile`, then
    appends one vector line per sample to the output file for both the
    'benign' and 'malcode' trees under `tfpath` via createMatrix().
    """
    desfiledir = os.path.join(BASEPATH, 'feature')
    checkDir(desfiledir)

    desfilepath = os.path.join(desfiledir, '2_gram_training_feature_new')
    checkFile(desfilepath)

    log('Generating', 'feature list', subpath='classfier')
    # Fix: the original shadowed the `feafile` parameter with the open file
    # handle; also replaced readlines()+append loop with a comprehension.
    with open(feafile) as fea_fh:
        fealist = [line.split('----')[0].strip() for line in fea_fh]
    log('Complete', str(fealist), subpath='classfier')

    log('Generating', 'matrix of benign', subpath='classfier')
    createMatrix(desfilepath, fealist, tfpath, 'benign')
    log('Generating', 'matrix of malcode', subpath='classfier')
    createMatrix(desfilepath, fealist, tfpath, 'malcode')
Example 53
Project: malcode   Author: moonsea   File: creatematrix.py    GNU General Public License v3.0 5 votes vote down vote up
def createMatrix(desfilepath, fealist, tfpath, type):
    """Append one '----'-joined feature-vector line per file under
    `tfpath/type` to `desfilepath` ('1' labels benign, '2' malcode)."""
    tmppath = os.path.join(tfpath, type)
    label = '1' if type == 'benign' else '2'
    for item in os.listdir(tmppath):
        log('Generating', item, subpath='classfier')
        filepath = os.path.join(tmppath, item)
        raw = str(getVector(fealist, filepath))
        cleaned = raw.replace('[', '').replace(']', '').replace('\'', '')
        vector = '----'.join([item, cleaned, label])
        with open(desfilepath, 'a+') as desfile:
            desfile.writelines(vector + '\n')
        log('Complete', vector, subpath='classfier')
Example 54
Project: malcode   Author: moonsea   File: benignasm.py    GNU General Public License v3.0 5 votes vote down vote up
def countFile(dirpath, suffix=''):
    """Count the files in `dirpath` whose name's last dot-separated piece
    equals `suffix` exactly."""
    count = 0
    for name in os.listdir(dirpath):
        if name.split('.')[-1] == suffix:
            count += 1
    return count
Example 55
Project: alfred-yubikey-otp   Author: robertoriv   File: workflow.py    MIT License 5 votes vote down vote up
def decode(self, text, encoding=None, normalization=None):
        """Return ``text`` as normalised unicode.

        If ``encoding`` and/or ``normalization`` is ``None``, the
        ``input_encoding``and ``normalization`` parameters passed to
        :class:`Workflow` are used.

        :param text: string
        :type text: encoded or Unicode string. If ``text`` is already a
            Unicode string, it will only be normalised.
        :param encoding: The text encoding to use to decode ``text`` to
            Unicode.
        :type encoding: ``unicode`` or ``None``
        :param normalization: The nomalisation form to apply to ``text``.
        :type normalization: ``unicode`` or ``None``
        :returns: decoded and normalised ``unicode``

        :class:`Workflow` uses "NFC" normalisation by default. This is the
        standard for Python and will work well with data from the web (via
        :mod:`~workflow.web` or :mod:`json`).

        macOS, on the other hand, uses "NFD" normalisation (nearly), so data
        coming from the system (e.g. via :mod:`subprocess` or
        :func:`os.listdir`/:mod:`os.path`) may not match. You should either
        normalise this data, too, or change the default normalisation used by
        :class:`Workflow`.

        """
        encoding = encoding or self._input_encoding
        # NOTE(review): '_normalizsation' (sic) — presumably this matches the
        # misspelled attribute name set in __init__ (not visible here);
        # confirm before "correcting" the spelling.
        normalization = normalization or self._normalizsation
        if not isinstance(text, unicode):  # Python 2 `unicode` type
            text = unicode(text, encoding)
        return unicodedata.normalize(normalization, text)
Example 56
Project: client   Author: Scorched-Moon   File: dialog.py    GNU General Public License v3.0 5 votes vote down vote up
def _list_dir_(self):
        """Refresh the dialog's list widget with the contents of
        self.curdir: directories first (with '..' on top), then files,
        both alphabetically sorted."""
        self.input_dir.value = self.curdir
        self.input_dir.pos = len(self.curdir)
        self.input_dir.vpos = 0
        dirs = []
        files = []
        try:
            for entry in os.listdir(self.curdir):
                if os.path.isdir(os.path.join(self.curdir, entry)):
                    dirs.append(entry)
                else:
                    files.append(entry)
        except OSError:
            # Fix: was a bare `except:` (which also swallowed
            # KeyboardInterrupt etc.); only directory-access errors are
            # expected here. The user-visible message (typo included) is
            # preserved byte-for-byte.
            self.input_file.value = "Opps! no access"
        dirs.sort()
        dirs = ['..'] + dirs  # parent directory is always listed first

        files.sort()
        for entry in dirs:
            self.list.add(entry, image=self.dir_img, value=entry)
        for entry in files:
            self.list.add(entry, value=entry)
        self.list.set_vertical_scroll(0)
Example 57
Project: pyblish-win   Author: pyblish   File: pathfix.py    GNU Lesser General Public License v3.0 5 votes vote down vote up
def recursedown(dirname):
    """Process `dirname` recursively; returns 1 when it cannot be listed.

    NOTE(review): Python 2 syntax (`except os.error, msg`); this scraped
    excerpt appears truncated — the original continues past the except.
    """
    dbg('recursedown(%r)\n' % (dirname,))
    bad = 0
    try:
        names = os.listdir(dirname)
    except os.error, msg:
        err('%s: cannot list directory: %r\n' % (dirname, msg))
        return 1
Example 58
Project: pyblish-win   Author: pyblish   File: mkreal.py    GNU Lesser General Public License v3.0 5 votes vote down vote up
def mkrealdir(name):
    """Replace the symlink `name` (pointing at a directory) with a real
    directory of the same mode whose entries are symlinks back into the
    original target."""
    mode = S_IMODE(os.stat(name)[ST_MODE])  # stat follows the symlink
    linkto = os.readlink(name)
    entries = os.listdir(name)
    os.unlink(name)
    os.mkdir(name, mode)
    os.chmod(name, mode)
    # The entries now sit one level deeper, so reach the target via '..'.
    linkto = join(os.pardir, linkto)
    for entry in entries:
        if entry not in (os.curdir, os.pardir):
            os.symlink(join(linkto, entry), join(name, entry))
Example 59
Project: pyblish-win   Author: pyblish   File: linktree.py    GNU Lesser General Public License v3.0 5 votes vote down vote up
def linknames(old, new, link):
    """Presumably links the entries of directory `old` into `new`; this
    scraped excerpt is truncated after the error-handling prologue.

    NOTE(review): Python 2 syntax (print statement, `except os.error, msg`).
    """
    if debug: print 'linknames', (old, new, link)
    try:
        names = os.listdir(old)
    except os.error, msg:
        print old + ': warning: cannot listdir:', msg
        return
Example 60
Project: pyblish-win   Author: pyblish   File: finddiv.py    GNU Lesser General Public License v3.0 5 votes vote down vote up
def processdir(dir, listnames):
    """List `dir` and process its entries (truncated excerpt).

    Returns 1 when the directory cannot be listed.
    NOTE(review): Python 2 syntax (`except os.error, msg`).
    """
    try:
        names = os.listdir(dir)
    except os.error, msg:
        sys.stderr.write("Can't list directory: %s\n" % dir)
        return 1
Example 61
Project: pyblish-win   Author: pyblish   File: lll.py    GNU Lesser General Public License v3.0 5 votes vote down vote up
def lll(dirname):
    """Print every symlink in `dirname` as 'name -> target'.

    NOTE(review): Python 2 print statement; '.' and '..' are skipped.
    """
    for name in os.listdir(dirname):
        if name not in (os.curdir, os.pardir):
            full = os.path.join(dirname, name)
            if os.path.islink(full):
                print name, '->', os.readlink(full)
Example 62
Project: pyblish-win   Author: pyblish   File: fixcid.py    GNU Lesser General Public License v3.0 5 votes vote down vote up
def recursedown(dirname):
    """Process `dirname` recursively; returns 1 when it cannot be listed.

    NOTE(review): Python 2 syntax (`except os.error, msg`); this scraped
    excerpt appears truncated — the original continues past the except.
    """
    dbg('recursedown(%r)\n' % (dirname,))
    bad = 0
    try:
        names = os.listdir(dirname)
    except os.error, msg:
        err(dirname + ': cannot list directory: ' + str(msg) + '\n')
        return 1
Example 63
Project: pyblish-win   Author: pyblish   File: methfix.py    GNU Lesser General Public License v3.0 5 votes vote down vote up
def recursedown(dirname):
    """Process `dirname` recursively; returns 1 when it cannot be listed.

    NOTE(review): Python 2 syntax (`except os.error, msg`); truncated
    scraped excerpt — the original function continues past the except.
    """
    dbg('recursedown(%r)\n' % (dirname,))
    bad = 0
    try:
        names = os.listdir(dirname)
    except os.error, msg:
        err('%s: cannot list directory: %r\n' % (dirname, msg))
        return 1
Example 64
Project: pyblish-win   Author: pyblish   File: __init__.py    GNU Lesser General Public License v3.0 5 votes vote down vote up
def suite():
    """Build a TestSuite from every test*.py module found in `here`."""
    result = unittest.TestSuite()
    for fn in os.listdir(here):
        if not (fn.startswith("test") and fn.endswith(".py")):
            continue
        modname = "unittest.test." + fn[:-3]
        __import__(modname)
        result.addTest(loader.loadTestsFromModule(sys.modules[modname]))
    return result
Example 65
Project: pyblish-win   Author: pyblish   File: test_discovery.py    GNU Lesser General Public License v3.0 5 votes vote down vote up
def setup_module_clash(self):
        """Install a fake 'foo' module and patched os hooks so discovery
        sees a clashing module; registers a cleanup that restores
        everything. Returns abspath('foo')."""
        class Module(object):
            __file__ = 'bar/foo.py'
        sys.modules['foo'] = Module
        full_path = os.path.abspath('foo')

        saved = (os.listdir, os.path.isfile, os.path.isdir)

        def cleanup():
            os.listdir, os.path.isfile, os.path.isdir = saved
            del sys.modules['foo']
            if full_path in sys.path:
                sys.path.remove(full_path)
        self.addCleanup(cleanup)

        os.listdir = lambda _: ['foo.py']
        os.path.isfile = lambda _: True
        os.path.isdir = lambda _: True
        return full_path
Example 66
Project: pyblish-win   Author: pyblish   File: test_main.py    GNU Lesser General Public License v3.0 5 votes vote down vote up
def test_filename_changing_on_output_single_dir(self):
        """2to3 a single directory with a new output dir and suffix."""
        # NOTE(review): Python 2 test code (StringIO.StringIO,
        # assertRegexpMatches).
        self.setup_test_source_trees()
        out = StringIO.StringIO()
        err = StringIO.StringIO()
        suffix = "TEST"
        ret = self.run_2to3_capture(
                ["-n", "--add-suffix", suffix, "--write-unchanged-files",
                 "--no-diffs", "--output-dir",
                 self.py3_dest_dir, self.py2_src_dir],
                StringIO.StringIO(""), out, err)
        self.assertEqual(ret, 0)
        stderr = err.getvalue()
        self.assertIn(" implies -w.", stderr)
        self.assertIn(
                "Output in %r will mirror the input directory %r layout" % (
                        self.py3_dest_dir, self.py2_src_dir), stderr)
        # Every source file must appear in the output dir with the suffix.
        self.assertEqual(set(name+suffix for name in self.setup_files),
                         set(os.listdir(self.py3_dest_dir)))
        for name in self.setup_files:
            self.assertIn("Writing converted %s to %s" % (
                    os.path.join(self.py2_src_dir, name),
                    os.path.join(self.py3_dest_dir, name+suffix)), stderr)
        sep = re.escape(os.sep)
        self.assertRegexpMatches(
                stderr, r"No changes to .*/__init__\.py".replace("/", sep))
        self.assertNotRegex(
                stderr, r"No changes to .*/trivial\.py".replace("/", sep))
Example 67
Project: pyblish-win   Author: pyblish   File: test_main.py    GNU Lesser General Public License v3.0 5 votes vote down vote up
def test_filename_changing_on_output_single_file(self):
        """2to3 a single file with a new output dir."""
        # NOTE(review): Python 2 test code (StringIO.StringIO).
        self.setup_test_source_trees()
        err = StringIO.StringIO()
        ret = self.run_2to3_capture(
                ["-n", "-w", "--no-diffs", "--output-dir", self.py3_dest_dir,
                 self.trivial_py2_file],
                StringIO.StringIO(""), StringIO.StringIO(), err)
        self.assertEqual(ret, 0)
        stderr = err.getvalue()
        self.assertIn(
                "Output in %r will mirror the input directory %r layout" % (
                        self.py3_dest_dir, self.py2_src_dir), stderr)
        # Only the one converted file may appear in the output dir.
        self.assertEqual(set([os.path.basename(self.trivial_py2_file)]),
                         set(os.listdir(self.py3_dest_dir)))
Example 68
Project: pyblish-win   Author: pyblish   File: modulefinder.py    GNU Lesser General Public License v3.0 5 votes vote down vote up
def find_all_submodules(self, m):
        """Return the names of all submodules found on m.__path__
        (excluding '__init__'); unlistable directories are reported via
        self.msg and skipped."""
        if not m.__path__:
            return
        modules = {}
        # 'suffixes' used to be a list hardcoded to [".py", ".pyc", ".pyo"].
        # But we must also collect Python extension modules - although
        # we cannot separate normal dlls from Python extensions.
        suffixes = [triple[0] for triple in imp.get_suffixes()]
        for dirname in m.__path__:
            try:
                names = os.listdir(dirname)
            except os.error:
                self.msg(2, "can't list directory", dirname)
                continue
            for name in names:
                # First matching suffix wins; no match leaves mod = None.
                for suff in suffixes:
                    if name.endswith(suff):
                        mod = name[:-len(suff)]
                        break
                else:
                    mod = None
                if mod and mod != "__init__":
                    modules[mod] = mod
        return modules.keys()
Example 69
Project: pyblish-win   Author: pyblish   File: __init__.py    GNU Lesser General Public License v3.0 5 votes vote down vote up
def find_package_modules(package, mask):
    """Yield dotted module names inside `package` whose filename matches
    the fnmatch pattern `mask`."""
    import fnmatch
    pkg_loader = getattr(package, "__loader__", None)
    if pkg_loader is not None and hasattr(pkg_loader, '_files'):
        # Loader with a `_files` archive index: match against its entries.
        path = package.__name__.replace(".", os.path.sep)
        mask = os.path.join(path, mask)
        for fnm in pkg_loader._files.iterkeys():
            if fnmatch.fnmatchcase(fnm, mask):
                yield os.path.splitext(fnm)[0].replace(os.path.sep, ".")
    else:
        # Plain filesystem package: scan its first __path__ entry.
        for fnm in os.listdir(package.__path__[0]):
            if fnmatch.fnmatchcase(fnm, mask):
                yield "%s.%s" % (package.__name__, os.path.splitext(fnm)[0])
Example 70
Project: pyblish-win   Author: pyblish   File: dircache.py    GNU Lesser General Public License v3.0 5 votes vote down vote up
def listdir(path):
    """List directory contents, using cache.

    The module-level `cache` maps path -> (mtime, entries); entries are
    re-read and re-sorted whenever the directory mtime changes.
    Fix: the local holding the entries was named `list`, shadowing the
    builtin — renamed to `entries` (behavior unchanged).
    """
    try:
        cached_mtime, entries = cache[path]
        # Drop the entry while revalidating; it is restored below.
        del cache[path]
    except KeyError:
        cached_mtime, entries = -1, []
    mtime = os.stat(path).st_mtime
    if mtime != cached_mtime:
        entries = os.listdir(path)
        entries.sort()
    cache[path] = mtime, entries
    return entries
Example 71
Project: fs_image   Author: facebookincubator   File: yum_dnf_from_snapshot.py    MIT License 4 votes vote down vote up
def _prepare_versionlock_dir(yum_dnf: YumDnf, list_path: Path) -> Path:
    '''
    This prepares a directory containing:
      - the versionlock plugin code (see the Buck target for its provenance)
      - the plugin configuration
      - the actual list of locked versions

    This directory is used by `YumDnfConfIsolator.isolate_main` to tell
    `yum` / `dnf` to use the plugin.

    NOTE(review): this is a generator (it `yield`s the directory, then runs
    post-use sanity checks) — presumably wrapped as a context manager at a
    site not visible in this excerpt; confirm before calling directly.
    '''
    with temp_dir() as d:
        vl_conf = textwrap.dedent(f'''\
            [main]
            enabled = 1
            locklist = {d.decode()}/versionlock.list
        ''')
        with open(d / 'versionlock.conf', 'w') as outf:
            outf.write(vl_conf)

        # `dnf` and `yum` expect different formats, so we parse our own.
        template = {
            YumDnf.yum: '{e}:{n}-{v}-{r}.{a}',
            YumDnf.dnf: '{n}-{e}:{v}-{r}.{a}',
        }[yum_dnf]
        # Each input line is tab-separated: epoch, name, version, release,
        # arch — rewritten into the tool-specific versionlock format.
        with open(list_path) as rf, open(d / 'versionlock.list', 'w') as wf:
            for l in rf:
                e, n, v, r, a = l.split('\t')
                wf.write(template.format(e=e, n=n, v=v, r=r, a=a))

        # The plugin code ships gzipped; decompress it into the directory.
        with importlib.resources.path(
            'rpm', f'{yum_dnf.value}_versionlock.gz',
        ) as p, gzip.open(p) as rf, open(d / 'versionlock.py', 'wb') as wf:
            wf.write(rf.read())

        yield d

        # Clean up, making sure that there are no new files.

        # Comparing the contents of the plugin & its list is too much effort
        with open(d / 'versionlock.conf') as infile:
            assert infile.read() == vl_conf

        assert (set(os.listdir(d)) - {b'versionlock.pyc'}) == {
            b'versionlock.conf', b'versionlock.list', b'versionlock.py',
        }, os.listdir(d)
Example 72
Project: fs_image   Author: facebookincubator   File: test_yum_dnf_from_snapshot.py    MIT License 4 votes vote down vote up
def _check_installed_content(self, install_root, installed_content):
        """Verify the RPM payloads under `install_root`, then delete every
        known artifact and assert that nothing unexpected remains.

        NOTE(review): deletion runs `sudo rm -rf` on collected paths — the
        startswith(install_root) assertion below is the only safety guard.
        """
        # Remove known content so we can check there is nothing else.
        remove = []

        # Check that the RPMs installed their payload.
        for path, content in installed_content.items():
            remove.append(install_root / 'usr/share/rpm_test' / path)
            with open(remove[-1]) as f:
                self.assertEqual(content, f.read())

        # Remove /bin/sh
        remove.append(install_root / 'bin/sh')

        prog_name = self._YUM_DNF.value

        # `yum` & `dnf` also write some indexes & metadata.
        for path in [
            f'var/lib/{prog_name}', 'var/lib/rpm', f'var/cache/{prog_name}',
            'usr/lib/.build-id'
        ]:
            remove.append(install_root / path)
            self.assertTrue(os.path.isdir(remove[-1]), remove[-1])
        remove.append(install_root / f'var/log/{prog_name}.log')
        self.assertTrue(os.path.exists(remove[-1]))
        if self._YUM_DNF == YumDnf.dnf:  # `dnf` loves log files
            for logfile in ['dnf.librepo.log', 'dnf.rpm.log', 'hawkey.log']:
                remove.append(install_root / 'var/log' / logfile)

        # Check that the above list of paths is complete.
        for path in remove:
            # We're running rm -rf as `root`, better be careful.
            self.assertTrue(path.startswith(install_root))
            # Most files are owned by root, so the sudo is needed.
            subprocess.run(['sudo', 'rm', '-rf', path], check=True)
        # Remove the (now empty) parent directories; rmdir fails loudly if
        # anything unexpected is still inside them.
        subprocess.run([
            'sudo', 'rmdir',
            'usr/share/rpm_test', 'usr/share', 'usr/lib', 'usr',
            'var/lib', 'var/cache', 'var/log', 'var/tmp', 'var',
            'bin', *([
                'etc/dnf/modules.d', 'etc/dnf', 'etc'
            ] if self._YUM_DNF == YumDnf.dnf else []),
        ], check=True, cwd=install_root)
        required_dirs = sorted([b'dev', b'meta'])
        self.assertEqual(required_dirs, sorted(os.listdir(install_root)))
        for d in required_dirs:
            self.assertEqual([], os.listdir(install_root / d))
Example 73
Project: fs_image   Author: facebookincubator   File: subvolume_garbage_collector.py    MIT License 4 votes vote down vote up
def garbage_collect_subvolumes(refcounts_dir, subvolumes_dir):
    '''
    Remove btrfs subvolume wrappers from `subvolumes_dir` whose refcount
    file in `refcounts_dir` has fewer than 2 hardlinks, deleting (in this
    order) the refcount file, the inner btrfs subvolume, and finally the
    wrapper directory itself.  Wrappers whose refcount still has >= 2
    links are considered in use and left untouched.
    '''
    # IMPORTANT: We must list subvolumes BEFORE refcounts. The risk is that
    # this runs concurrently with another build, which will create a new
    # refcount & subvolume (in that order).  If we read refcounts first, we
    # might end up winning the race against the other build, and NOT reading
    # the new refcount.  If we then lose the second part of the race, we
    # would find the subvolume that the other process just created, and
    # delete it.
    subvol_wrappers = set(list_subvolume_wrappers(subvolumes_dir))
    subvol_wrapper_to_nlink = dict(list_refcounts(refcounts_dir))

    # Delete subvolumes (& their wrappers) with insufficient refcounts.
    for subvol_wrapper in subvol_wrappers:
        # A wrapper with no refcount file at all gets nlink == 0.
        nlink = subvol_wrapper_to_nlink.get(subvol_wrapper, 0)
        if nlink >= 2:
            if nlink > 2:
                # Not sure how this might happen, but it seems non-fatal...
                log.error(f'{nlink} > 2 links to subvolume {subvol_wrapper}')
            continue
        refcount_path = os.path.join(refcounts_dir, f'{subvol_wrapper}.json')
        log.warning(
            f'Deleting {subvol_wrapper} since its refcount has {nlink} links'
        )
        # Start by unlinking the refcount to dramatically decrease the
        # chance of leaving an orphaned refcount file on disk.  The most
        # obvious way to get an orphaned refcount is for this program to
        # abort between the line that creates the refcount link, and the
        # next line that creates the subvolume wrapper.
        #
        # I do not see a great way to completely eliminate orphan refcount
        # files.  One could try to have a separate pass that flocks the
        # refcount file before removing it, and to also flock the refcount
        # file before creating the wrapper directory.  But, since file
        # creation & flock cannot be atomic, this leaves us open to a race
        # where a concurrent GC pass removes the refcount link immediately
        # after it gets created, so that part of the code would have to be
        # willing to repeat the race until it wins.  In all, that extra
        # complexity is far too ugly compared to the slim risk of leaving
        # some unused refcount files on disk.
        if nlink:  # nlink == 0 means there is no refcount file to unlink
            os.unlink(refcount_path)
        wrapper_path = os.path.join(subvolumes_dir, subvol_wrapper)
        wrapper_content = os.listdir(wrapper_path)
        if len(wrapper_content) > 1:
            raise RuntimeError(f'{wrapper_path} must contain only the subvol')
        if len(wrapper_content) == 1:  # Empty wrappers are OK to GC, too.
            subprocess.check_call([
                'sudo', 'btrfs', 'subvolume', 'delete',
                os.path.join(
                    subvolumes_dir,
                    # Subvols are wrapped in a user-owned temporary directory,
                    # following the convention `{rule name}:{version}/{subvol}`.
                    subvol_wrapper,
                    wrapper_content[0],
                ),
            ])
        os.rmdir(wrapper_path)
Example 74
Project: fs_image   Author: facebookincubator   File: subvolume_on_disk.py    MIT License 4 votes vote down vote up
def from_serializable_dict(cls, d, subvolumes_dir):
        '''
        Rebuild an instance from its serialized dict `d`, validating that
        the on-disk subvolume under `subvolumes_dir` matches: correct
        wrapper layout, sole occupant of its wrapper, same host, same
        btrfs UUID.  Raises RuntimeError on any mismatch.
        '''
        rel_path = d[_SUBVOLUME_REL_PATH]
        # Copypasta of subvolume_path(), needed before the object exists;
        # the assert below keeps the two in sync.
        subvol_path = os.path.join(subvolumes_dir, rel_path)
        # This incidentally checks that the subvolume exists and is btrfs.
        volume_props = _btrfs_get_volume_props(subvol_path)
        subvol = cls(**{
            _BTRFS_UUID: d[_BTRFS_UUID],
            _BTRFS_PARENT_UUID: volume_props['Parent UUID'],
            _HOSTNAME: d[_HOSTNAME],
            _SUBVOLUMES_BASE_DIR: subvolumes_dir,
            _SUBVOLUME_REL_PATH: rel_path,
        })
        assert subvol_path == subvol.subvolume_path(), (d, subvolumes_dir)

        # The relative path must be garbage-collectable, i.e. of the form
        # `{rule name}:{version}/{subvol}`.
        inner_dir = os.path.basename(rel_path)
        outer_dir = os.path.basename(os.path.dirname(rel_path))
        if ':' not in outer_dir or rel_path != os.path.join(
            outer_dir, inner_dir
        ):
            raise RuntimeError(
                'Subvolume must have the form <rule name>:<version>/<subvol>,'
                f' not {rel_path}'
            )
        outer_dir_content = os.listdir(os.path.join(subvolumes_dir, outer_dir))
        # For GC, the wrapper must contain the subvolume, and nothing else.
        if outer_dir_content != [inner_dir]:
            raise RuntimeError(
                f'Subvolume wrapper {outer_dir} contained {outer_dir_content} '
                f'instead of {[inner_dir]}'
            )
        # The subvolume must match its serialized description.
        cur_host = socket.getfqdn()
        if subvol.hostname != cur_host:
            raise RuntimeError(
                f'Subvolume {subvol} did not come from current host {cur_host}'
            )
        if volume_props['UUID'] != subvol.btrfs_uuid:
            raise RuntimeError(
                f'UUID in subvolume JSON {subvol} does not match that of the '
                f'actual subvolume {volume_props}'
            )
        return subvol
Example 75
Project: leapp-repository   Author: oamg   File: systemfacts.py    Apache License 2.0 4 votes vote down vote up
def _get_active_kernel_modules(logger):
    '''
    Yield an ActiveKernelModule for every module listed by `lsmod`,
    carrying the parameters exposed under /sys/module/<name>/parameters
    and, when `modinfo` provides one, a whitespace-stripped signature.
    '''
    for line in run(['lsmod'], split=True)['stdout'][1:]:
        name = line.split(' ')[0]

        # Parameters of the module as exposed by the `/sys` VFS.  When the
        # module exposes none, yield just the module name.
        parameters_path = os.path.join(
            '/sys/module/{module}'.format(module=name), 'parameters')
        if not os.path.exists(parameters_path):
            yield ActiveKernelModule(filename=name, parameters=[])
            continue

        # Probe for signature information via `modinfo`; a failing
        # `modinfo` simply means no signature.
        try:
            signature = run(
                ['modinfo', '-F', 'signature', name], split=False
            )['stdout']
        except CalledProcessError:
            signature = None

        signature_string = None
        if signature:
            # Remove all whitespace from the signature string.
            signature_string = re.sub(r"\s+", "", signature, flags=re.UNICODE)

        # Since we're using the `/sys` VFS, os.listdir() gives us the
        # parameter names; read the value from each listed path.
        parameter_dict = {}
        for param in sorted(os.listdir(parameters_path)):
            param_path = os.path.join(parameters_path, param)
            try:
                with open(param_path, mode='r') as fp:
                    parameter_dict[param] = fp.read().strip()
            except IOError as exc:
                if exc.errno not in (errno.EACCES, errno.EPERM):
                    raise exc
                # Write-only parameter: just log its name and the module,
                # then continue.
                logger.warning(
                    'Unable to read parameter "{param}" of kernel module '
                    '"{name}"'.format(param=param, name=name)
                )

        yield ActiveKernelModule(
            filename=name,
            parameters=[
                # Project the dictionary as a list of key values.
                KernelModuleParameter(name=k, value=v)
                for (k, v) in six.iteritems(parameter_dict)
            ],
            signature=signature_string,
        )
Example 76
Project: incubator-spot   Author: apache   File: start_listener.py    Apache License 2.0 4 votes vote down vote up
def main():
    '''
        Main command-line entry point: parse CLI args and the JSON config
        file, optionally authenticate via Kerberos, assemble the Spark
        Streaming job state, and submit `common/listener.py`.
    '''
    state = {}

    try:
        args = parse_args()
        conf = json.loads(args.config_file.read())

        # .............................check kerberos authentication
        if os.getenv('KRB_AUTH'):
            kb = Kerberos()
            kb.authenticate()

        state.update(**args.__dict__)

        # .............................add Spark Streaming parameters
        for key in conf['spark-streaming'].keys():
            value = conf['spark-streaming'][key]
            # Bug fix: compare to None with `is`, not `==` (PEP 8).
            if value is None:
                continue

            if isinstance(value, basestring):
                # Strings are stripped; empty (falsy) results are skipped.
                value = value.strip()
                if value:
                    state[key] = value
                continue
            state[key] = value

        # .............................add files to place on the PYTHONPATH
        state['py_files'] = ','.join(
            os.path.abspath(os.path.join('dist', x)) for x in os.listdir('dist'))

        # .............................add database name
        state['database'] = conf['dbname']

        # .............................add zookeeper's connection string
        state['zkquorum'] = '{0}:{1}'.format(conf['kafka']['zookeper_server'],
                                        conf['kafka']['zookeper_port'])

        spark_job('common/listener.py', **state)

    except SystemExit: raise
    # Deliberate catch-all: report via the exception hook, then exit 1.
    except:
        sys.excepthook(*sys.exc_info())
        sys.exit(1)
Example 77
Project: pyblish-win   Author: pyblish   File: checkpyc.py    GNU Lesser General Public License v3.0 4 votes vote down vote up
def main():
    silent = 0
    verbose = 0
    if sys.argv[1:]:
        if sys.argv[1] == '-v':
            verbose = 1
        elif sys.argv[1] == '-s':
            silent = 1
    MAGIC = imp.get_magic()
    if not silent:
        print 'Using MAGIC word', repr(MAGIC)
    for dirname in sys.path:
        try:
            names = os.listdir(dirname)
        except os.error:
            print 'Cannot list directory', repr(dirname)
            continue
        if not silent:
            print 'Checking ', repr(dirname), '...'
        names.sort()
        for name in names:
            if name[-3:] == '.py':
                name = os.path.join(dirname, name)
                try:
                    st = os.stat(name)
                except os.error:
                    print 'Cannot stat', repr(name)
                    continue
                if verbose:
                    print 'Check', repr(name), '...'
                name_c = name + 'c'
                try:
                    f = open(name_c, 'r')
                except IOError:
                    print 'Cannot open', repr(name_c)
                    continue
                magic_str = f.read(4)
                mtime_str = f.read(4)
                f.close()
                if magic_str <> MAGIC:
                    print 'Bad MAGIC word in ".pyc" file',
                    print repr(name_c)
                    continue
                mtime = get_long(mtime_str)
                if mtime == 0 or mtime == -1:
                    print 'Bad ".pyc" file', repr(name_c)
                elif mtime <> st[ST_MTIME]:
                    print 'Out-of-date ".pyc" file',
                    print repr(name_c) 
Example 78
Project: pyblish-win   Author: pyblish   File: loader.py    GNU Lesser General Public License v3.0 4 votes vote down vote up
def _find_tests(self, start_dir, pattern):
        """Used by discovery. Yields test suites it loads.

        Scans one directory level: files matching `pattern` are loaded
        as test modules; subdirectories that are packages (contain an
        __init__.py) are recursed into, honoring a package-level
        `load_tests` hook when the package itself matches `pattern`.
        """
        paths = os.listdir(start_dir)

        for path in paths:
            full_path = os.path.join(start_dir, path)
            if os.path.isfile(full_path):
                if not VALID_MODULE_NAME.match(path):
                    # valid Python identifiers only
                    continue
                if not self._match_path(path, full_path, pattern):
                    continue
                # if the test file matches, load it
                name = self._get_name_from_path(full_path)
                try:
                    module = self._get_module_from_name(name)
                # NOTE: bare except is deliberate -- any import-time
                # failure is surfaced as a synthetic failing test.
                except:
                    yield _make_failed_import_test(name, self.suiteClass)
                else:
                    mod_file = os.path.abspath(getattr(module, '__file__', full_path))
                    realpath = os.path.splitext(os.path.realpath(mod_file))[0]
                    fullpath_noext = os.path.splitext(os.path.realpath(full_path))[0]
                    # Case-insensitive compare: detect when the import
                    # machinery resolved a same-named module from some
                    # other location than the file discovered here.
                    if realpath.lower() != fullpath_noext.lower():
                        module_dir = os.path.dirname(realpath)
                        mod_name = os.path.splitext(os.path.basename(full_path))[0]
                        expected_dir = os.path.dirname(full_path)
                        msg = ("%r module incorrectly imported from %r. Expected %r. "
                               "Is this module globally installed?")
                        raise ImportError(msg % (mod_name, module_dir, expected_dir))
                    yield self.loadTestsFromModule(module)
            elif os.path.isdir(full_path):
                # Only packages (directories with __init__.py) are searched.
                if not os.path.isfile(os.path.join(full_path, '__init__.py')):
                    continue

                load_tests = None
                tests = None
                if fnmatch(path, pattern):
                    # only check load_tests if the package directory itself matches the filter
                    name = self._get_name_from_path(full_path)
                    package = self._get_module_from_name(name)
                    load_tests = getattr(package, 'load_tests', None)
                    tests = self.loadTestsFromModule(package, use_load_tests=False)

                if load_tests is None:
                    if tests is not None:
                        # tests loaded from package file
                        yield tests
                    # recurse into the package
                    for test in self._find_tests(full_path, pattern):
                        yield test
                else:
                    # Delegate to the package's load_tests hook; wrap any
                    # failure as a synthetic failing test.  (Python 2
                    # `except Exception, e` syntax.)
                    try:
                        yield load_tests(self, tests, pattern)
                    except Exception, e:
                        yield _make_failed_load_tests(package.__name__, e,
                                                      self.suiteClass)
Example 79
Project: pyblish-win   Author: pyblish   File: test_discovery.py    GNU Lesser General Public License v3.0 4 votes vote down vote up
def test_find_tests(self):
        '''Exercise TestLoader._find_tests against a faked filesystem.'''
        loader = unittest.TestLoader()

        # Save the real os.listdir / os.path.isfile / os.path.isdir so the
        # addCleanup() calls below can restore them after the test.
        original_listdir = os.listdir
        def restore_listdir():
            os.listdir = original_listdir
        original_isfile = os.path.isfile
        def restore_isfile():
            os.path.isfile = original_isfile
        original_isdir = os.path.isdir
        def restore_isdir():
            os.path.isdir = original_isdir

        # Each listdir call pops the next entry list: first the top-level
        # directory, then the recursion into 'test_dir'.
        path_lists = [['test1.py', 'test2.py', 'not_a_test.py', 'test_dir',
                       'test.foo', 'test-not-a-module.py', 'another_dir'],
                      ['test3.py', 'test4.py', ]]
        os.listdir = lambda path: path_lists.pop(0)
        self.addCleanup(restore_listdir)

        # Anything ending in 'dir' is treated as a directory.
        def isdir(path):
            return path.endswith('dir')
        os.path.isdir = isdir
        self.addCleanup(restore_isdir)

        def isfile(path):
            # another_dir is not a package and so shouldn't be recursed into
            return not path.endswith('dir') and not 'another_dir' in path
        os.path.isfile = isfile
        self.addCleanup(restore_isfile)

        # Stub out module import/loading so no real modules are needed;
        # the fakes just tag the path so results are recognizable below.
        loader._get_module_from_name = lambda path: path + ' module'
        loader.loadTestsFromModule = lambda module: module + ' tests'

        top_level = os.path.abspath('/foo')
        loader._top_level_dir = top_level
        suite = list(loader._find_tests(top_level, 'test*.py'))

        # test1/test2 match at top level; test3/test4 come from recursing
        # into the 'test_dir' package.
        expected = [name + ' module tests' for name in
                    ('test1', 'test2')]
        expected.extend([('test_dir.%s' % name) + ' module tests' for name in
                    ('test3', 'test4')])
        self.assertEqual(suite, expected)
Example 80
Project: pyblish-win   Author: pyblish   File: pytree_idempotency.py    GNU Lesser General Public License v3.0 4 votes vote down vote up
def main():
    gr = driver.load_grammar("Grammar.txt")
    dr = driver.Driver(gr, convert=pytree.convert)

    fn = "example.py"
    tree = dr.parse_file(fn, debug=True)
    if not diff(fn, tree):
        print "No diffs."
    if not sys.argv[1:]:
        return # Pass a dummy argument to run the complete test suite below

    problems = []

    # Process every imported module
    for name in sys.modules:
        mod = sys.modules[name]
        if mod is None or not hasattr(mod, "__file__"):
            continue
        fn = mod.__file__
        if fn.endswith(".pyc"):
            fn = fn[:-1]
        if not fn.endswith(".py"):
            continue
        print >>sys.stderr, "Parsing", fn
        tree = dr.parse_file(fn, debug=True)
        if diff(fn, tree):
            problems.append(fn)

    # Process every single module on sys.path (but not in packages)
    for dir in sys.path:
        try:
            names = os.listdir(dir)
        except os.error:
            continue
        print >>sys.stderr, "Scanning", dir, "..."
        for name in names:
            if not name.endswith(".py"):
                continue
            print >>sys.stderr, "Parsing", name
            fn = os.path.join(dir, name)
            try:
                tree = dr.parse_file(fn, debug=True)
            except pgen2.parse.ParseError, err:
                print "ParseError:", err
            else:
                if diff(fn, tree):
                    problems.append(fn)

    # Show summary of problem files