Python shutil.copyfile() Examples

The following are 30 code examples showing how to use shutil.copyfile(). They are extracted from open source projects; the source project, file, and license are noted above each example.

You may also want to check out the other available functions and classes of the shutil module.
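For reference, here is a minimal sketch of the basic call before the project-extracted examples. shutil.copyfile(src, dst) copies the contents of src to the path dst (which must be a file name, not a directory) and returns the destination path; it does not copy metadata such as permissions or timestamps. The file names below are hypothetical, chosen only for illustration.

import shutil

src = "settings.ini"        # hypothetical source file
dst = "settings.ini.bak"    # destination must be a full file path, not a directory

# Copy the file contents only; permissions and timestamps are not copied.
copied_path = shutil.copyfile(src, dst)
print(copied_path)  # prints the destination path, e.g. settings.ini.bak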

Example 1
Project: arm_now   Author: nongiach   File: download.py    License: MIT License
def download(url, filename, cache_directory):
    filename_cache = url.split('/')[-1]
    filename_cache = ''.join([c for c in filename_cache if c.isdigit() or c.isalpha()])
    filename_cache = cache_directory + "/" + filename_cache
    if os.path.exists(filename):
        return
    elif os.path.exists(filename_cache):
        print("Already downloaded")
        shutil.copyfile(filename_cache, filename)
    else:
        print("\nDownloading {} from {}".format(filename, url))
        os.mkdir(cache_directory)
        # wget.download(url, out=filename_cache)
        obj = SmartDL(url, filename_cache)
        obj.start()
        shutil.copyfile(filename_cache, filename) 
Example 2
Project: pruning_yolov3   Author: zbyuan   File: utils.py    License: GNU General Public License v3.0
def coco_single_class_labels(path='../coco/labels/train2014/', label_class=43):
    # Makes single-class coco datasets. from utils.utils import *; coco_single_class_labels()
    if os.path.exists('new/'):
        shutil.rmtree('new/')  # delete output folder
    os.makedirs('new/')  # make new output folder
    os.makedirs('new/labels/')
    os.makedirs('new/images/')
    for file in tqdm(sorted(glob.glob('%s/*.*' % path))):
        with open(file, 'r') as f:
            labels = np.array([x.split() for x in f.read().splitlines()], dtype=np.float32)
        i = labels[:, 0] == label_class
        if any(i):
            img_file = file.replace('labels', 'images').replace('txt', 'jpg')
            labels[:, 0] = 0  # reset class to 0
            with open('new/images.txt', 'a') as f:  # add image to dataset list
                f.write(img_file + '\n')
            with open('new/labels/' + Path(file).name, 'a') as f:  # write label
                for l in labels[i]:
                    f.write('%g %.6f %.6f %.6f %.6f\n' % tuple(l))
            shutil.copyfile(src=img_file, dst='new/images/' + Path(file).name.replace('txt', 'jpg'))  # copy images 
Example 3
Project: Pytorch-Project-Template   Author: moemen95   File: condensenet.py    License: MIT License
def save_checkpoint(self, filename='checkpoint.pth.tar', is_best=0):
        """
        Saving the latest checkpoint of the training
        :param filename: filename which will contain the state
        :param is_best: flag if it is the best model
        :return:
        """
        state = {
            'epoch': self.current_epoch,
            'iteration': self.current_iteration,
            'state_dict': self.model.state_dict(),
            'optimizer': self.optimizer.state_dict(),
        }
        # Save the state
        torch.save(state, self.config.checkpoint_dir + filename)
        # If it is the best copy it to another file 'model_best.pth.tar'
        if is_best:
            shutil.copyfile(self.config.checkpoint_dir + filename,
                            self.config.checkpoint_dir + 'model_best.pth.tar') 
Example 4
Project: Pytorch-Project-Template   Author: moemen95   File: erfnet.py    License: MIT License
def save_checkpoint(self, filename='checkpoint.pth.tar', is_best=0):
        """
        Saving the latest checkpoint of the training
        :param filename: filename which will contain the state
        :param is_best: flag if it is the best model
        :return:
        """
        state = {
            'epoch': self.current_epoch + 1,
            'iteration': self.current_iteration,
            'state_dict': self.model.state_dict(),
            'optimizer': self.optimizer.state_dict(),
        }
        # Save the state
        torch.save(state, self.config.checkpoint_dir + filename)
        # If it is the best copy it to another file 'model_best.pth.tar'
        if is_best:
            shutil.copyfile(self.config.checkpoint_dir + filename,
                            self.config.checkpoint_dir + 'model_best.pth.tar') 
Example 5
Project: Pytorch-Project-Template   Author: moemen95   File: dcgan.py    License: MIT License
def save_checkpoint(self, file_name="checkpoint.pth.tar", is_best = 0):
        state = {
            'epoch': self.current_epoch,
            'iteration': self.current_iteration,
            'G_state_dict': self.netG.state_dict(),
            'G_optimizer': self.optimG.state_dict(),
            'D_state_dict': self.netD.state_dict(),
            'D_optimizer': self.optimD.state_dict(),
            'fixed_noise': self.fixed_noise,
            'manual_seed': self.manual_seed
        }
        # Save the state
        torch.save(state, self.config.checkpoint_dir + file_name)
        # If it is the best copy it to another file 'model_best.pth.tar'
        if is_best:
            shutil.copyfile(self.config.checkpoint_dir + file_name,
                            self.config.checkpoint_dir + 'model_best.pth.tar') 
Example 6
Project: delocate   Author: matthew-brett   File: test_delocating.py    License: BSD 2-Clause "Simplified" License
def test_dyld_library_path_beats_basename():
    # Test that we find libraries on DYLD_LIBRARY_PATH before basename
    with TempDirWithoutEnvVars('DYLD_LIBRARY_PATH') as tmpdir:
        # Copy libs into a temporary directory
        subtree = pjoin(tmpdir, 'subtree')
        all_local_libs = _make_libtree(subtree)
        liba, libb, libc, test_lib, slibc, stest_lib = all_local_libs
        # Copy liba into a subdirectory
        subdir = os.path.join(subtree, 'subdir')
        os.mkdir(subdir)
        new_libb = os.path.join(subdir, os.path.basename(LIBB))
        shutil.copyfile(libb, new_libb)
        # Without updating the environment variable, we find the lib normally
        predicted_lib_location = search_environment_for_lib(libb)
        # tmpdir can end up in /var, and that can be symlinked to
        # /private/var, so we'll use realpath to resolve the two
        assert_equal(predicted_lib_location, os.path.realpath(libb))
        # Updating shows us the new lib
        os.environ['DYLD_LIBRARY_PATH'] = subdir
        predicted_lib_location = search_environment_for_lib(libb)
        assert_equal(predicted_lib_location, new_libb) 
Example 7
Project: delocate   Author: matthew-brett   File: test_delocating.py    License: BSD 2-Clause "Simplified" License
def test_dyld_fallback_library_path_loses_to_basename():
    # Test that we find libraries on basename before DYLD_FALLBACK_LIBRARY_PATH
    with TempDirWithoutEnvVars('DYLD_FALLBACK_LIBRARY_PATH') as tmpdir:
        # Copy libs into a temporary directory
        subtree = pjoin(tmpdir, 'subtree')
        all_local_libs = _make_libtree(subtree)
        liba, libb, libc, test_lib, slibc, stest_lib = all_local_libs
        # Copy liba into a subdirectory
        subdir = 'subdir'
        os.mkdir(subdir)
        new_libb = os.path.join(subdir, os.path.basename(LIBB))
        shutil.copyfile(libb, new_libb)
        os.environ['DYLD_FALLBACK_LIBRARY_PATH'] = subdir
        predicted_lib_location = search_environment_for_lib(libb)
        # tmpdir can end up in /var, and that can be symlinked to
        # /private/var, so we'll use realpath to resolve the two
        assert_equal(predicted_lib_location, os.path.realpath(libb)) 
Example 8
Project: delocate   Author: matthew-brett   File: test_tools.py    License: BSD 2-Clause "Simplified" License
def test_get_archs_fuse():
    # Test routine to get architecture types from file
    assert_equal(get_archs(LIB32), ARCH_32)
    assert_equal(get_archs(LIB64), ARCH_64)
    assert_equal(get_archs(LIB64A), ARCH_64)
    assert_equal(get_archs(LIBBOTH), ARCH_BOTH)
    assert_raises(RuntimeError, get_archs, 'not_a_file')
    with InTemporaryDirectory():
        lipo_fuse(LIB32, LIB64, 'anotherlib')
        assert_equal(get_archs('anotherlib'), ARCH_BOTH)
        lipo_fuse(LIB64, LIB32, 'anotherlib')
        assert_equal(get_archs('anotherlib'), ARCH_BOTH)
        shutil.copyfile(LIB32, 'libcopy32')
        lipo_fuse('libcopy32', LIB64, 'anotherlib')
        assert_equal(get_archs('anotherlib'), ARCH_BOTH)
        assert_raises(RuntimeError, lipo_fuse,
                      'libcopy32', LIB32, 'yetanother')
        shutil.copyfile(LIB64, 'libcopy64')
        assert_raises(RuntimeError, lipo_fuse,
                      'libcopy64', LIB64, 'yetanother') 
Example 9
Project: delocate   Author: matthew-brett   File: test_wheelies.py    License: BSD 2-Clause "Simplified" License
def test_patch_wheel():
    # Check patching of wheel
    with InTemporaryDirectory():
        # First wheel needs proper wheel filename for later unpack test
        out_fname = basename(PURE_WHEEL)
        patch_wheel(PURE_WHEEL, WHEEL_PATCH, out_fname)
        zip2dir(out_fname, 'wheel1')
        with open(pjoin('wheel1', 'fakepkg2', '__init__.py'), 'rt') as fobj:
            assert_equal(fobj.read(), 'print("Am in init")\n')
        # Check that wheel unpack works
        back_tick([sys.executable, '-m', 'wheel', 'unpack', out_fname])
        # Copy the original, check it doesn't have patch
        shutil.copyfile(PURE_WHEEL, 'copied.whl')
        zip2dir('copied.whl', 'wheel2')
        with open(pjoin('wheel2', 'fakepkg2', '__init__.py'), 'rt') as fobj:
            assert_equal(fobj.read(), '')
        # Overwrite input wheel (the default)
        patch_wheel('copied.whl', WHEEL_PATCH)
        # Patched
        zip2dir('copied.whl', 'wheel3')
        with open(pjoin('wheel3', 'fakepkg2', '__init__.py'), 'rt') as fobj:
            assert_equal(fobj.read(), 'print("Am in init")\n')
        # Check bad patch raises error
        assert_raises(RuntimeError,
                      patch_wheel, PURE_WHEEL, WHEEL_PATCH_BAD, 'out.whl') 
Example 10
Project: delocate   Author: matthew-brett   File: test_scripts.py    License: BSD 2-Clause "Simplified" License
def test_fix_wheel_dylibs():
    # Check default and non-default search for dynamic libraries
    with InTemporaryDirectory() as tmpdir:
        # Default in-place fix
        fixed_wheel, stray_lib = _fixed_wheel(tmpdir)
        _rename_module(fixed_wheel, 'module.other', 'test.whl')
        shutil.copyfile('test.whl', 'test2.whl')
        # Default is to look in all files and therefore fix
        code, stdout, stderr = run_command(
            ['delocate-wheel', 'test.whl'])
        _check_wheel('test.whl', '.dylibs')
        # Can turn this off to only look in dynamic lib exts
        code, stdout, stderr = run_command(
            ['delocate-wheel', 'test2.whl', '-d'])
        with InWheel('test2.whl'):  # No fix
            assert_false(exists(pjoin('fakepkg1', '.dylibs'))) 
Example 11
Project: delocate   Author: matthew-brett   File: test_scripts.py    License: BSD 2-Clause "Simplified" License
def test_patch_wheel():
    # Some tests for patching wheel
    with InTemporaryDirectory():
        shutil.copyfile(PURE_WHEEL, 'example.whl')
        # Default is to overwrite input
        code, stdout, stderr = run_command(
            ['delocate-patch', 'example.whl', WHEEL_PATCH])
        zip2dir('example.whl', 'wheel1')
        with open(pjoin('wheel1', 'fakepkg2', '__init__.py'), 'rt') as fobj:
            assert_equal(fobj.read(), 'print("Am in init")\n')
        # Pass output directory
        shutil.copyfile(PURE_WHEEL, 'example.whl')
        code, stdout, stderr = run_command(
            ['delocate-patch', 'example.whl', WHEEL_PATCH, '-w', 'wheels'])
        zip2dir(pjoin('wheels', 'example.whl'), 'wheel2')
        with open(pjoin('wheel2', 'fakepkg2', '__init__.py'), 'rt') as fobj:
            assert_equal(fobj.read(), 'print("Am in init")\n')
        # Bad patch fails
        shutil.copyfile(PURE_WHEEL, 'example.whl')
        assert_raises(RuntimeError,
                      run_command,
                      ['delocate-patch', 'example.whl', WHEEL_PATCH_BAD]) 
Example 12
Project: sqs-s3-logger   Author: ellimilial   File: lambda_function_builder.py    License: Apache License 2.0
def build_package():
    build_dir = tempfile.mkdtemp(prefix='lambda_package_')
    install_packages(build_dir, REQUIRED_PACKAGES)
    for f in REQUIRED_FILES:
        shutil.copyfile(
            src=os.path.join(module_path, f),
            dst=os.path.join(build_dir, f)
        )

    out_file = os.path.join(
        tempfile.mkdtemp(prefix='lambda_package_built'),
        'sqs_s3_logger_lambda_{}.zip'.format(datetime.datetime.now().isoformat())
    )
    LOGGER.info('Creating a function package file at {}'.format(out_file))

    archive(build_dir, out_file)
    return out_file 
Example 13
Project: skelebot   Author: carsdotcom   File: artifactory.py    License: MIT License
def pushArtifact(artifactFile, user, token, file, url, force):
    """Pushes the given file to the url with the provided user/token auth"""

    # Error and exit if artifact already exists and we are not forcing an override
    try:
        if (not force) and (artifactory.ArtifactoryPath(url, auth=(user, token)).exists()):
            raise RuntimeError(ERROR_ALREADY_PUSHED)
    except MissingSchema:
        pass

    # Rename artifact, deploy the renamed artifact, and then rename it back to original name
    print("Deploying {file} to {url}".format(file=file, url=url))
    path = artifactory.ArtifactoryPath(url, auth=(user, token))
    shutil.copyfile(artifactFile, file)
    try:
        path.deploy_file(file)
        os.remove(file)
    except:
        os.remove(file)
        raise 
Example 14
Project: toolium   Author: Telefonica   File: test_visual_test.py    License: Apache License 2.0
def test_assert_screenshot_no_enabled_force(driver_wrapper):
    # Configure driver mock
    with open(file_v1, "rb") as f:
        image_data = f.read()
    driver_wrapper.driver.get_screenshot_as_png.return_value = image_data

    # Update conf and create a new VisualTest instance
    driver_wrapper.config.set('VisualTests', 'enabled', 'false')
    visual = VisualTest(driver_wrapper, force=True)

    # Add v1 baseline image
    baseline_file = os.path.join(root_path, 'output', 'visualtests', 'baseline', 'firefox', 'screenshot_full.png')
    shutil.copyfile(file_v1, baseline_file)

    # Assert screenshot
    visual.assert_screenshot(None, filename='screenshot_full', file_suffix='screenshot_suffix')
    driver_wrapper.driver.get_screenshot_as_png.assert_called_once_with() 
Example 15
Project: toolium   Author: Telefonica   File: test_visual_test.py    License: Apache License 2.0
def test_assert_screenshot_no_enabled_force_fail(driver_wrapper):
    # Configure driver mock
    with open(file_v1, "rb") as f:
        image_data = f.read()
    driver_wrapper.driver.get_screenshot_as_png.return_value = image_data

    # Update conf and create a new VisualTest instance
    driver_wrapper.config.set('VisualTests', 'fail', 'false')
    driver_wrapper.config.set('VisualTests', 'enabled', 'false')
    visual = VisualTest(driver_wrapper, force=True)

    # Add v2 baseline image
    baseline_file = os.path.join(root_path, 'output', 'visualtests', 'baseline', 'firefox', 'screenshot_full.png')
    shutil.copyfile(file_v2, baseline_file)

    # Assert screenshot
    with pytest.raises(AssertionError) as exc:
        visual.assert_screenshot(None, filename='screenshot_full', file_suffix='screenshot_suffix')
    driver_wrapper.driver.get_screenshot_as_png.assert_called_once_with()
    assert str(exc.value).endswith("did not match the baseline '%s' (by a distance of 522.65)" % baseline_file) 
Example 16
Project: overhaul-distillation   Author: clovaai   File: saver.py    License: MIT License
def save_checkpoint(self, state, is_best, filename='checkpoint.pth.tar'):
        """Saves checkpoint to disk"""
        filename = os.path.join(self.experiment_dir, filename)
        torch.save(state, filename)
        if is_best:
            best_pred = state['best_pred']
            with open(os.path.join(self.experiment_dir, 'best_pred.txt'), 'w') as f:
                f.write(str(best_pred))
            if self.runs:
                previous_miou = [0.0]
                for run in self.runs:
                    run_id = run.split('_')[-1]
                    path = os.path.join(self.directory, 'experiment_{}'.format(str(run_id)), 'best_pred.txt')
                    if os.path.exists(path):
                        with open(path, 'r') as f:
                            miou = float(f.readline())
                            previous_miou.append(miou)
                    else:
                        continue
                max_miou = max(previous_miou)
                if best_pred > max_miou:
                    shutil.copyfile(filename, os.path.join(self.directory, 'model_best.pth.tar'))
            else:
                shutil.copyfile(filename, os.path.join(self.directory, 'model_best.pth.tar')) 
Example 17
Project: MSDNet-PyTorch   Author: kalviny   File: main.py    License: MIT License
def save_checkpoint(state, args, is_best, filename, result):
    print(args)
    result_filename = os.path.join(args.save, 'scores.tsv')
    model_dir = os.path.join(args.save, 'save_models')
    latest_filename = os.path.join(model_dir, 'latest.txt')
    model_filename = os.path.join(model_dir, filename)
    best_filename = os.path.join(model_dir, 'model_best.pth.tar')
    os.makedirs(args.save, exist_ok=True)
    os.makedirs(model_dir, exist_ok=True)
    print("=> saving checkpoint '{}'".format(model_filename))

    torch.save(state, model_filename)

    with open(result_filename, 'w') as f:
        print('\n'.join(result), file=f)

    with open(latest_filename, 'w') as fout:
        fout.write(model_filename)
    if is_best:
        shutil.copyfile(model_filename, best_filename)

    print("=> saved checkpoint '{}'".format(model_filename))
    return 
Example 18
Project: landmarkerio-server   Author: menpo   File: cache.py    License: BSD 3-Clause "New" or "Revised" License
def _cache_image_for_id(cache_dir, asset_id, img):
    asset_cache_dir = p.join(cache_dir, asset_id)
    image_info_path = p.join(asset_cache_dir, CacheFile.image)
    texture_path = p.join(asset_cache_dir, CacheFile.texture)
    thumbnail_path = p.join(asset_cache_dir, CacheFile.thumbnail)
    img_path = img.path

    # WebGL only allows textures of maximum dimension 4096
    ratio = 4096.0 / np.array(img.shape)
    if np.any(ratio < 1):
        # the largest axis of the img could be too big for older browsers.
        # Give a warning.
        print('Warning: {} has shape {}. Dims larger than 4096 may have '
              'issues rendering in older browsers.'.format(asset_id,
                                                           img.shape))

    # 2. Save out the image
    if img_path.suffix == '.jpg':
        # Original was a jpg that was suitable, save it
        shutil.copyfile(str(img_path), texture_path)
    else:
        # Original wasn't a jpg or was too big - make it so
        img.as_PILImage().save(texture_path, format='jpeg')
    # 3. Save out the thumbnail
    save_jpg_thumbnail_file(img, thumbnail_path) 
Example 19
Project: fusesoc   Author: olofk   File: coregen.py    License: BSD 2-Clause "Simplified" License
def _checkout(self, local_dir):
        script_file = self.config.get("script_file")
        project_file = self.config.get("project_file")
        extra_files = self.config.get("extra_files")
        logger.info("Using Coregen to generate project " + project_file)
        if not os.path.isdir(local_dir):
            os.makedirs(local_dir)
        src_files = [script_file, project_file]
        if extra_files:
            src_files += extra_files.split()

        for f in src_files:
            f_src = os.path.join(self.core_root, f)
            f_dst = os.path.join(local_dir, f)
            if os.path.exists(f_src):
                d_dst = os.path.dirname(f_dst)
                if not os.path.exists(d_dst):
                    os.makedirs(d_dst)
                shutil.copyfile(f_src, f_dst)
            else:
                logger.error("Cannot find file %s" % f_src)
        args = ["-r", "-b", script_file, "-p", project_file]
        Launcher("coregen", args, cwd=local_dir).run() 
Example 20
Project: fusesoc   Author: olofk   File: logicore.py    License: BSD 2-Clause "Simplified" License
def _checkout(self, local_dir):
        script_file = self.config.get("script_file")
        project_file = self.config.get("project_file")
        extra_files = self.config.get("extra_files")
        logger.info(
            "Using Xilinx Vivado to generate LogiCORE(tm) project " + project_file
        )
        if not os.path.isdir(local_dir):
            os.mkdir(local_dir)
        src_files = [script_file, project_file]
        if extra_files:
            src_files += extra_files.split()

        for f in src_files:
            f_src = os.path.join(self.core_root, f)
            f_dst = os.path.join(local_dir, f)
            if os.path.exists(f_src):
                d_dst = os.path.dirname(f_dst)
                if not os.path.exists(d_dst):
                    os.makedirs(d_dst)
                shutil.copyfile(f_src, f_dst)
            else:
                logger.error("Cannot find file %s" % f_src)
        args = ["-mode", "batch", "-source", script_file]
        Launcher("vivado", args, cwd=local_dir).run() 
Example 21
Project: ConvLab   Author: ConvLab   File: mdbt.py    License: MIT License
def auto_download(self):
        """Automatically download the pretrained model and necessary data."""
        if not os.path.exists(self.data_dir):
            os.mkdir(self.data_dir)
        if os.path.exists(os.path.join(self.data_dir, 'models')) and \
            os.path.exists(os.path.join(self.data_dir, 'data')) and \
            os.path.exists(os.path.join(self.data_dir, 'word-vectors')):
            return
        cached_path(self.file_url, self.data_dir)
        files = os.listdir(self.data_dir)
        target_file = ''
        for name in files:
            if name.endswith('.json'):
                target_file = name[:-5]
        try:
            assert target_file in files
        except Exception as e:
            print('allennlp download file error: MDBT Multiwoz data download failed.')
            raise e
        zip_file_path = os.path.join(self.data_dir, target_file+'.zip')
        shutil.copyfile(os.path.join(self.data_dir, target_file), zip_file_path)
        # zip_file_path = os.path.join(self.data_dir, 'mdbt_multiwoz_sys.zip')
        with zipfile.ZipFile(zip_file_path, 'r') as zip_ref:
            zip_ref.extractall(self.data_dir) 
Example 22
Project: GroundedTranslation   Author: elliottd   File: Callbacks.py    License: BSD 3-Clause "New" or "Revised" License
def create_checkpoint_directory(self, savetime):
        '''
        We will create one directory to store all of the epochs data inside.
        The name is based on the run_string (if provided) or the current time.
        '''

        prefix = self.args.run_string if self.args.run_string != "" else ""
        number = "%03d" % (len(self.val_metric) + 1)
        filepath = "checkpoints/%s/%s-%s" % ((prefix, number, savetime))
        try:
            os.mkdir("checkpoints/%s/" % (prefix))
            shutil.copyfile("train.py", "checkpoints/%s/train.py" % prefix)
            shutil.copyfile("models.py", "checkpoints/%s/models.py" % prefix)
        except OSError:
            pass  # directory already exists
        try:
            os.mkdir(filepath)
        except OSError:
            pass  # directory already exists
        logger.info("\nIn %s ...",filepath)
        return filepath 
Example 23
Project: Radium   Author: mehulj94   File: Mozilla.py    License: Apache License 2.0
def save_db(self, userpath):

        # create the folder to save it by profile
        relative_path = constant.folder_name + os.sep + 'firefox'
        if not os.path.exists(relative_path):
            os.makedirs(relative_path)

        relative_path += os.sep + os.path.basename(userpath)
        if not os.path.exists(relative_path):
            os.makedirs(relative_path)

        # Get the database name
        if os.path.exists(userpath + os.sep + 'logins.json'):
            dbname = 'logins.json'
        elif os.path.exists(userpath + os.sep + 'signons.sqlite'):
            dbname = 'signons.sqlite'

        # copy the files (database + key3.db)
        try:
            ori_db = userpath + os.sep + dbname
            dst_db = relative_path + os.sep + dbname
            shutil.copyfile(ori_db, dst_db)
        except Exception:
            pass 
Example 24
Project: pyGSTi   Author: pyGSTio   File: merge_helpers.py    License: Apache License 2.0
def rsync_offline_dir(outputDir):
    """
    Copy the pyGSTi 'offline' directory into `outputDir` by creating or updating
    any outdated files as needed.
    """
    destDir = _os.path.join(str(outputDir), "offline")
    offlineDir = _os.path.join(_os.path.dirname(_os.path.abspath(__file__)),
                               "templates", "offline")  # TODO package resources?
    if not _os.path.exists(destDir):
        _shutil.copytree(offlineDir, destDir)

    else:
        for dirpath, _, filenames in _os.walk(str(offlineDir)):
            for nm in filenames:
                srcnm = _os.path.join(dirpath, nm)
                relnm = _os.path.relpath(srcnm, offlineDir)
                destnm = _os.path.join(destDir, relnm)

                if not _os.path.isfile(destnm) or \
                        (_os.path.getmtime(destnm) < _os.path.getmtime(srcnm)):
                    _shutil.copyfile(srcnm, destnm)
                    #print("COPYING to %s" % destnm) 
Example 25
Project: recipes-py   Author: luci   File: proto_support.py    License: Apache License 2.0
def _collect_protos(argfile_fd, proto_files, dest):
  """Copies all proto_files into dest.

  Writes this list of files to `argfile_fd` which will be passed to protoc.

  Args:
    * argfile_fd (int): An open writable file descriptor for the argfile.
    * proto_files (List[Tuple[src_abspath: str, dest_relpath: str]])
    * dest (str): Path to the directory where we should collect the .proto
    files.

  Side-effects:
    * Each dest_relpath is written to `argfile_fd` on its own line.
    * Closes `argfile_fd`.
  """
  try:
    _makedirs = _DirMaker()
    for src_abspath, dest_relpath in proto_files:
      destpath = os.path.join(dest, dest_relpath)
      _makedirs(os.path.dirname(destpath))
      shutil.copyfile(src_abspath, destpath)
      os.write(argfile_fd, dest_relpath)
      os.write(argfile_fd, '\n')
  finally:
    os.close(argfile_fd)  # for windows 
Example 26
Project: robosat   Author: mapbox   File: subset.py    License: MIT License
def main(args):
    images = tiles_from_slippy_map(args.images)

    tiles = set(tiles_from_csv(args.tiles))

    for tile, src in tqdm(list(images), desc="Subset", unit="image", ascii=True):
        if tile not in tiles:
            continue

        # The extention also includes the period.
        extention = os.path.splitext(src)[1]

        os.makedirs(os.path.join(args.out, str(tile.z), str(tile.x)), exist_ok=True)
        dst = os.path.join(args.out, str(tile.z), str(tile.x), "{}{}".format(tile.y, extention))

        shutil.copyfile(src, dst) 
Example 27
Project: drydock   Author: airshipit   File: conftest.py    License: Apache License 2.0
def input_files(tmpdir_factory, request):
    tmpdir = tmpdir_factory.mktemp('data')
    samples_dir = os.path.dirname(os.getenv('YAMLDIR'))
    samples = os.listdir(samples_dir)

    for f in samples:
        src_file = samples_dir + "/" + f
        dst_file = str(tmpdir) + "/" + f
        shutil.copyfile(src_file, dst_file)

    return tmpdir 
Example 28
Project: drydock   Author: airshipit   File: test_schema_validation.py    License: Apache License 2.0
def input_files(self, tmpdir_factory, request):
        tmpdir = tmpdir_factory.mktemp('data')
        samples_dir = os.path.dirname(str(
            request.fspath)) + "/" + "../yaml_samples"
        samples = os.listdir(samples_dir)

        for f in samples:
            src_file = samples_dir + "/" + f
            dst_file = str(tmpdir) + "/" + f
            shutil.copyfile(src_file, dst_file)

        return tmpdir 
Example 29
def snapshot(self, iter):
    net = self.net

    if not os.path.exists(self.output_dir):
      os.makedirs(self.output_dir)

    # Store the model snapshot
    filename = cfg.TRAIN.SNAPSHOT_PREFIX + '_iter_{:d}'.format(iter) + '.pth'
    filename = os.path.join(self.output_dir, filename)
    torch.save(self.net.state_dict(), filename)
    print('Wrote snapshot to: {:s}'.format(filename))
    
    
    if iter % 10000 == 0:
        shutil.copyfile(filename, filename + '.{:d}_cache'.format(iter))
    
    # Also store some meta information, random state, etc.
    nfilename = cfg.TRAIN.SNAPSHOT_PREFIX + '_iter_{:d}'.format(iter) + '.pkl'
    nfilename = os.path.join(self.output_dir, nfilename)
    # current state of numpy random
    st0 = np.random.get_state()
    # current position in the database
    cur = self.data_layer._cur
    # current shuffled indexes of the database
    perm = self.data_layer._perm
    # current position in the validation database
    cur_val = self.data_layer_val._cur
    # current shuffled indexes of the validation database
    perm_val = self.data_layer_val._perm

    # Dump the meta info
    with open(nfilename, 'wb') as fid:
      pickle.dump(st0, fid, pickle.HIGHEST_PROTOCOL)
      pickle.dump(cur, fid, pickle.HIGHEST_PROTOCOL)
      pickle.dump(perm, fid, pickle.HIGHEST_PROTOCOL)
      pickle.dump(cur_val, fid, pickle.HIGHEST_PROTOCOL)
      pickle.dump(perm_val, fid, pickle.HIGHEST_PROTOCOL)
      pickle.dump(iter, fid, pickle.HIGHEST_PROTOCOL)

    return filename, nfilename 
Example 30
Project: neural-fingerprinting   Author: StephanZheng   File: dataset_helper.py    License: BSD 3-Clause "New" or "Revised" License
def download_dataset(storage_client, image_batches, target_dir,
                     local_dataset_copy=None):
  """Downloads dataset, organize it by batches and rename images.

  Args:
    storage_client: instance of the CompetitionStorageClient
    image_batches: subclass of ImageBatchesBase with data about images
    target_dir: target directory, should exist and be empty
    local_dataset_copy: directory with local dataset copy, if local copy is
      available then images will be taken from there instead of Cloud Storage

  Data in the target directory will be organized into subdirectories by batches,
  thus path to each image will be "target_dir/BATCH_ID/IMAGE_ID.png"
  where BATCH_ID - ID of the batch (key of image_batches.data),
  IMAGE_ID - ID of the image (key of image_batches.data[batch_id]['images'])
  """
  for batch_id, batch_value in iteritems(image_batches.data):
    batch_dir = os.path.join(target_dir, batch_id)
    os.mkdir(batch_dir)
    for image_id, image_val in iteritems(batch_value['images']):
      dst_filename = os.path.join(batch_dir, image_id + '.png')
      # try to use local copy first
      if local_dataset_copy:
        local_filename = os.path.join(local_dataset_copy,
                                      os.path.basename(image_val['image_path']))
        if os.path.exists(local_filename):
          shutil.copyfile(local_filename, dst_filename)
          continue
      # download image from cloud
      cloud_path = ('gs://' + storage_client.bucket_name
                    + '/' + image_val['image_path'])
      if not os.path.exists(dst_filename):
        subprocess.call(['gsutil', 'cp', cloud_path, dst_filename])