Python os.makedirs() Examples

The following code examples show how to use os.makedirs(). They are drawn from open-source Python projects; each example lists the project, author, source file, and license it comes from.
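
Most of the examples below follow one of two patterns: check os.path.exists() before calling os.makedirs(), or call os.makedirs() unconditionally and handle the "directory already exists" error. On Python 3.2+ the exist_ok=True argument covers the second pattern in a single call. A minimal sketch of the three variants (the path name is purely illustrative):

import os

target = 'output/reports'  # illustrative path

# Variant 1: check first (racy if another process creates the directory in between).
if not os.path.exists(target):
    os.makedirs(target)

# Variant 2: try it and ignore "already exists".
try:
    os.makedirs(target)
except FileExistsError:  # Python 3 only; on Python 2 catch OSError and check errno.EEXIST
    pass

# Variant 3 (Python 3.2+): the same idea in one call.
os.makedirs(target, exist_ok=True)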

Example 1
Project: factotum   Author: Denubis   File: update.py    GNU General Public License v3.0    10 votes
def copytree(src, dst, symlinks = False, ignore = None):
	if not os.path.exists(dst):
		os.makedirs(dst)
		shutil.copystat(src, dst)
	lst = os.listdir(src)
	if ignore:
		excl = ignore(src, lst)
		lst = [x for x in lst if x not in excl]
	for item in lst:
		s = os.path.join(src, item)
		d = os.path.join(dst, item)
		if symlinks and os.path.islink(s):
			if os.path.lexists(d):
				os.remove(d)
			os.symlink(os.readlink(s), d)
			try:
				st = os.lstat(s)
				mode = stat.S_IMODE(st.st_mode)
				os.lchmod(d, mode)
			except:
				pass # lchmod not available
		elif os.path.isdir(s):
			copytree(s, d, symlinks, ignore)
		else:
			shutil.copy2(s, d) 
Example 2
Project: fs_image   Author: facebookincubator   File: temp_subvolumes.py    MIT License    7 votes
def _prep_rel_path(self, rel_path: AnyStr):
        '''
        Ensures subvolumes live under our temporary directory, which
        improves safety, since its permissions ought to be u+rwx to avoid
        exposing setuid binaries inside the built subvolumes.
        '''
        rel_path = os.path.relpath(
            os.path.realpath(
                os.path.join(self._temp_dir, byteme(rel_path)),
            ),
            start=os.path.realpath(self._temp_dir),
        )
        if (
            rel_path == b'..' or rel_path.startswith(b'../') or
            os.path.isabs(rel_path)
        ):
            raise AssertionError(
                f'{rel_path} must be a subdirectory of {self._temp_dir}'
            )
        abs_path = os.path.join(self._temp_dir, rel_path)
        try:
            os.makedirs(os.path.dirname(abs_path))
        except FileExistsError:
            pass
        return abs_path 
Example 3
Project: wikilinks   Author: trovdimi   File: htmlstripper.py    MIT License    7 votes
def plaintext2zip(self, file_name, subdirname, plaintext):

        file_name=file_name.split('.')[0]
        plaintext_file_name = STATIC_PLAINTEXT_ARTICLES_DIR+subdirname+'/'+file_name+'.txt'
        zip_file_name = STATIC_PLAINTEXT_ARTICLES_DIR+subdirname+'/'+file_name+'.zip'

        if not os.path.exists(STATIC_PLAINTEXT_ARTICLES_DIR+subdirname):
            os.makedirs(STATIC_PLAINTEXT_ARTICLES_DIR+subdirname)


        with codecs.open(plaintext_file_name, 'w', encoding='utf8') as outfile:
            outfile.write(plaintext)
            outfile.flush()
            outfile.close()

        zf = zipfile.ZipFile(zip_file_name, mode='w', compression=zipfile.ZIP_DEFLATED)
        try:
            zf.write(plaintext_file_name, os.path.basename(plaintext_file_name))
            os.remove(plaintext_file_name)
        except Exception, e:
            print e
            logging.error('zip error %s ' % plaintext_file_name) 
Example 4
Project: incubator-spot   Author: apache   File: utils.py    Apache License 2.0    6 votes
def create_oa_folders(cls, type, date):

        # create date and ingest summary folder structure if they don't exist.
        root_path = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
        data_type_folder = "{0}/data/{1}/{2}"
        if not os.path.isdir(data_type_folder.format(root_path, type, date)): os.makedirs(
            data_type_folder.format(root_path, type, date))
        if not os.path.isdir(data_type_folder.format(root_path, type, "ingest_summary")): os.makedirs(
            data_type_folder.format(root_path, type, "ingest_summary"))

        # create ipynb folders.
        ipynb_folder = "{0}/ipynb/{1}/{2}".format(root_path, type, date)
        if not os.path.isdir(ipynb_folder): os.makedirs(ipynb_folder)

        # return path to folders.
        data_path = data_type_folder.format(root_path, type, date)
        ingest_path = data_type_folder.format(root_path, type, "ingest_summary")
        return data_path, ingest_path, ipynb_folder 
Example 5
Project: malcode   Author: moonsea   File: copyasm.py    GNU General Public License v3.0    6 votes
def CopyFile(srcDir, desDir, filename):
    if not os.path.exists(desDir):
        try:
            os.makedirs(desDir)
        except:
            print '[-] Mkdir error'

    desfile = os.path.join(desDir, filename)
    if os.path.isfile(desfile):
        log('Ignore', filename, '[-]', subpath='copyasm')
        print '[-]Ignore ', filename
        return
        # os.remove(desfile)

    log('Copying', filename, subpath='copyasm')
    print '[+]Copying ', filename

    try:
        shutil.copy(srcDir, desDir)
    except:
        log('Copying error', filename, '[-]', subpath='copyasm')
        print '[-]Copy error ', filename 
Example 6
Project: malcode   Author: moonsea   File: zeustracker.py    GNU General Public License v3.0    6 votes
def WriteRes(malName, malTitle, malDesc, malAuthor, malDate, malSrc):
    if not os.path.exists('malcode'):
        os.makedirs('malcode')

    malTitle = malTitle.replace('<em>', '').replace('</em>', '').replace('<strong>', '').replace('</strong>', '')
    malDesc = malDesc.replace('<em>', '').replace('</em>', '').replace('<strong>', '').replace('</strong>', '')
    malAuthor = malAuthor.replace('<em>', '').replace('</em>', '').replace('<strong>', '').replace('</strong>', '')
    malDate = malDate.replace('<em>', '').replace('</em>', '').replace('<strong>', '').replace('</strong>', '')

    content = '\n'.join([malName, malTitle, malDesc, malAuthor, malDate, malSrc])

    file_path = os.path.join('malcode', malName)

    with codecs.open(file_path, 'w', 'utf-8') as file:
        file.write(content)
    # file = codecs.open(file_path, 'a+', 'utf-8')
    # file.write(content + '\n')
    # file.close 
Example 7
Project: alfred-yubikey-otp   Author: robertoriv   File: web.py    MIT License    6 votes
def save_to_path(self, filepath):
        """Save retrieved data to file at ``filepath``.

        .. versionadded: 1.9.6

        :param filepath: Path to save retrieved data.

        """
        filepath = os.path.abspath(filepath)
        dirname = os.path.dirname(filepath)
        if not os.path.exists(dirname):
            os.makedirs(dirname)

        self.stream = True

        with open(filepath, 'wb') as fileobj:
            for data in self.iter_content():
                fileobj.write(data) 
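
The pattern above (resolve the path, create the parent directory, then write) recurs in several later examples. A condensed Python 3 sketch of the same idea, with hypothetical names:

import os

def save_bytes(filepath, data):
    """Write bytes to filepath, creating parent directories as needed."""
    filepath = os.path.abspath(filepath)  # guarantees dirname() is non-empty
    os.makedirs(os.path.dirname(filepath), exist_ok=True)
    with open(filepath, 'wb') as fileobj:
        fileobj.write(data)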
Example 8
Project: flask-ci   Author: vicenteneto   File: ci.py    MIT License    6 votes
def __call__(self, *args, **kwargs):
        output_dir = kwargs[CI.OUTPUT_DIR]
        verbose = kwargs[CI.VERBOSE]

        if not os.path.exists(output_dir):
            if verbose:
                print('Creating output directory...')
            os.makedirs(output_dir)

        for task in self.tasks:
            if verbose:
                print('Executing {0}...'.format(task.__module__))
            task.run(self.settings, **kwargs)

        if verbose:
            print('Done') 
Example 9
Project: aospy   Author: spencerahill   File: calc.py    Apache License 2.0    6 votes
def _save_files(self, data, dtype_out_time):
        """Save the data to netcdf files in direc_out."""
        path = self.path_out[dtype_out_time]
        if not os.path.isdir(self.dir_out):
            os.makedirs(self.dir_out)
        if 'reg' in dtype_out_time:
            try:
                reg_data = xr.open_dataset(path)
            except (EOFError, RuntimeError, IOError):
                reg_data = xr.Dataset()
            reg_data.update(data)
            data_out = reg_data
        else:
            data_out = data
        if isinstance(data_out, xr.DataArray):
            data_out = xr.Dataset({self.name: data_out})
        data_out.to_netcdf(path, engine='netcdf4') 
Example 10
Project: claxon   Author: vanatteveldt   File: preprocess.py    GNU General Public License v3.0    6 votes
def handle(self, *args, **options):
        project = Project.objects.get(pk=options['project_id'])
        dir = os.path.join(settings.TOKEN_DIR, "project_{}".format(project.id))
        os.makedirs(dir, exist_ok=True)
        docids = set(project.document_set.values_list("id", flat=True))
        preprocessed = {int(x) for x in os.listdir(dir)}
        todo = list(docids - preprocessed)
        if not todo:
            print("All documents already preprocessed")
            return
        print("Preprocessing {} documents".format(len(todo)))
        m = get_model(project)
        chunks = [todo[i:i+_CHUNK_SIZE] for i in range(0, len(todo), _CHUNK_SIZE)]
        for chunk in tqdm(chunks):
            docs = list(Document.objects.filter(pk__in=chunk))
            texts = [doc.text for doc in docs]
            results = m.pipe(texts, disable="textcat")
            for doc, tokens in zip(docs, results):
                fn = os.path.join(dir, str(doc.id))
                tokens.to_disk(fn) 
Example 11
Project: wechat-alfred-workflow   Author: TKkk-iOSer   File: web.py    MIT License    6 votes
def save_to_path(self, filepath):
        """Save retrieved data to file at ``filepath``.

        .. versionadded: 1.9.6

        :param filepath: Path to save retrieved data.

        """
        filepath = os.path.abspath(filepath)
        dirname = os.path.dirname(filepath)
        if not os.path.exists(dirname):
            os.makedirs(dirname)

        self.stream = True

        with open(filepath, 'wb') as fileobj:
            for data in self.iter_content():
                fileobj.write(data) 
Example 12
Project: RelayBot2.0   Author: nukeop   File: logs.py    GNU General Public License v3.0    6 votes
def __init__(self, bot):
        super(Logs, self).__init__(bot)
        self.loggers = {}
        self.logs_path = os.path.join(os.getcwd(), "logs")
        self.friend_logs_path = os.path.join(self.logs_path, "friends")
        self.group_logs_path = os.path.join(self.logs_path, "groups")

        try:
            os.makedirs(self.friend_logs_path)
        except OSError:
            logger.error("Could not create {}".format(self.friend_logs_path))

        try:
            os.makedirs(self.group_logs_path)
        except OSError:
            logger.error("Could not create {}".format(self.group_logs_path)) 
Example 13
Project: CFN-CR-PythonLambdaLayer   Author: kisst   File: lambda_function.py    GNU General Public License v3.0    6 votes
def dump_text_to_file(filename, text, dirpath):
    """
    Save extra functions text into file within the layer.
    """
    # dump variable's contents to a file under dirpath
    abs_dirpath = os.path.abspath(f"{dirpath}/lambdalayer")
    try:
        os.makedirs(abs_dirpath)
    except FileExistsError:
        pass
    abs_initpath = os.path.abspath(os.path.join(abs_dirpath, '__init__.py'))
    with open(abs_initpath, mode='a'):
        os.utime(abs_initpath, None)
    abs_filepath = os.path.abspath(os.path.join(abs_dirpath, filename))
    with open(abs_filepath, mode='w', encoding='utf-8') as file_var:
        file_var.write(base64.b64decode(text).decode('utf-8')) 
Example 14
Project: Traffic_sign_detection_YOLO   Author: AmeyaWagh   File: build.py    MIT License    6 votes
def savepb(self):
		"""
		Create a standalone const graph def that 
		C++	can load and run.
		"""
		darknet_pb = self.to_darknet()
		flags_pb = self.FLAGS
		flags_pb.verbalise = False
		
		flags_pb.train = False
		# rebuild another tfnet. all const.
		tfnet_pb = TFNet(flags_pb, darknet_pb)		
		tfnet_pb.sess = tf.Session(graph = tfnet_pb.graph)
		# tfnet_pb.predict() # uncomment for unit testing
		name = 'built_graph/{}.pb'.format(self.meta['name'])
		os.makedirs(os.path.dirname(name), exist_ok=True)
		#Save dump of everything in meta
		with open('built_graph/{}.meta'.format(self.meta['name']), 'w') as fp:
			json.dump(self.meta, fp)
		self.say('Saving const graph def to {}'.format(name))
		graph_def = tfnet_pb.sess.graph_def
		tf.train.write_graph(graph_def,'./', name, False) 
Example 15
Project: fs_image   Author: facebookincubator   File: test_yum_dnf_from_snapshot.py    MIT License    5 votes
def _install(self, *, protected_paths, version_lock=None):
        install_root = Path(tempfile.mkdtemp())
        try:
            # IMAGE_ROOT/meta/ is always required since it's always protected
            for p in set(protected_paths) | {'meta/'}:
                if p.endswith('/'):
                    os.makedirs(install_root / p)
                else:
                    os.makedirs(os.path.dirname(install_root / p))
                    with open(install_root / p, 'wb'):
                        pass
            snapshot_dir = Path(load_location('rpm', 'repo-snapshot'))
            # Note: this can't use `_yum_using_build_appliance` because that
            # would lose coverage info on `yum_dnf_from_snapshot.py`.  On
            # the other hand, running this test against the host is fragile
            # since it depends on the system packages available on CI
            # containers.  For this reason, this entire test is an
            # `image.python_unittest` that runs in a build appliance.
            with tempfile.NamedTemporaryFile(mode='w') as tf:
                if version_lock:
                    tf.write('\n'.join(version_lock) + '\n')
                tf.flush()
                yum_dnf_from_snapshot(
                    yum_dnf=self._YUM_DNF,
                    repo_server_bin=Path(load_location('rpm', 'repo-server')),
                    storage_cfg=json.dumps({
                        'key': 'test',
                        'kind': 'filesystem',
                        'base_dir': (snapshot_dir / 'storage').decode(),
                    }),
                    snapshot_dir=snapshot_dir,
                    install_root=Path(install_root),
                    protected_paths=protected_paths,
                    versionlock_list=tf.name,
                    yum_dnf_args=_INSTALL_ARGS,
                )
            yield install_root
        finally:
            assert install_root != '/'
            # Courtesy of `yum`, the `install_root` is now owned by root.
            subprocess.run(['sudo', 'rm', '-rf', install_root], check=True) 
Example 16
Project: fs_image   Author: facebookincubator   File: filesystem_storage.py    MIT License    5 votes
def writer(self) -> ContextManager[StorageOutput]:
        sid = str(uuid.uuid4()).replace('-', '')
        sid_path = self._path_for_storage_id(sid)
        try:
            os.makedirs(sid_path.dirname())
        except FileExistsError:  # pragma: no cover
            pass

        with os.fdopen(os.open(
            sid_path,
            os.O_WRONLY | os.O_CREAT | os.O_EXCL | os.O_CLOEXEC,
            mode=stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH,
        ), 'wb') as outfile:

            @contextmanager
            def get_id_and_release_resources():
                try:
                    yield sid
                finally:
                    # This `close()` flushes, making the written data readable,
                    # and prevents more writes via `StorageOutput`.
                    outfile.close()

            # `_CommitCallback` has a `try` to clean up on error. This
            # placement of the context assumes that `os.fdopen` cannot fail.
            with _CommitCallback(self, get_id_and_release_resources) as commit:
                yield StorageOutput(output=outfile, commit_callback=commit) 
Example 17
Project: fs_image   Author: facebookincubator   File: test_subvolume_garbage_collector.py    MIT License    5 votes
def test_gc_fails_when_wrapper_has_more_than_one(self):
        with tempfile.TemporaryDirectory() as refs_dir, \
             tempfile.TemporaryDirectory() as subs_dir:
            os.makedirs(os.path.join(subs_dir, 'no:refs/subvol1'))
            os.makedirs(os.path.join(subs_dir, 'no:refs/subvol2'))
            with self.assertRaisesRegex(
                RuntimeError, 'must contain only the subvol'
            ):
                sgc.subvolume_garbage_collector([
                    '--refcounts-dir', refs_dir,
                    '--subvolumes-dir', subs_dir,
                ]) 
Example 18
Project: fs_image   Author: facebookincubator   File: test_subvolume_on_disk.py    MIT License    5 votes
def test_from_subvolume_path(self):
        with tempfile.TemporaryDirectory() as td:
            # Note: Unlike test_from_serializable_dict_and_validation, this
            # test does NOT use a trailing / (to increase coverage).
            subvols = td.rstrip('/')
            rel_path = 'test_rule:vvv/test:subvol'
            subvol_path = os.path.join(subvols, rel_path)
            os.makedirs(subvol_path)  # `from_serializable_dict` checks this

            subvol = subvolume_on_disk.SubvolumeOnDisk.from_subvolume_path(
                subvol_path=subvol_path, subvolumes_dir=subvols,
            )
            with unittest.mock.patch('os.listdir') as listdir:
                listdir.return_value = ['test:subvol']
                self._check(
                    subvol,
                    subvol_path,
                    subvolume_on_disk.SubvolumeOnDisk(**{
                        subvolume_on_disk._BTRFS_UUID:
                            self._test_uuid(subvol_path),
                        subvolume_on_disk._BTRFS_PARENT_UUID: 'zupa',
                        subvolume_on_disk._HOSTNAME: _MY_HOST,
                        subvolume_on_disk._SUBVOLUME_REL_PATH: rel_path,
                        subvolume_on_disk._SUBVOLUMES_BASE_DIR: subvols,
                    }),
                )
                self.assertEqual(
                    listdir.call_args_list,
                    [((os.path.dirname(subvol_path),),)] * 2,
                )

            with self.assertRaisesRegex(
                RuntimeError, 'must be located inside the subvolumes directory'
            ):
                subvolume_on_disk.SubvolumeOnDisk.from_subvolume_path(
                    subvol_path=subvol_path, subvolumes_dir=subvols + '/bad',
                ) 
Example 19
Project: fs_image   Author: facebookincubator   File: common.py    MIT License    5 votes
def populate_temp_filesystem(img_path):
    'Matching Provides are generated by _temp_filesystem_provides'

    def p(img_rel_path):
        return os.path.join(img_path, img_rel_path)

    os.makedirs(p('a/b/c'))
    os.makedirs(p('a/d'))

    for filepath in ['a/E', 'a/d/F', 'a/b/c/G']:
        with open(p(filepath), 'w') as f:
            f.write('Hello, ' + filepath) 
Example 20
Project: leapp-repository   Author: oamg   File: actor.py    Apache License 2.0    5 votes
def generate_link_file(self, interface):

        try:
            os.makedirs('/etc/systemd/network')
        except OSError as e:
            if e.errno != errno.EEXIST:
                raise

        link_file = '/etc/systemd/network/10-leapp-{}.link'.format(interface.name)
        with open(link_file, 'w') as f:
            f.write(LINK_FILE_TEMPLATE.format(interface.mac, interface.name))

        self.initrd_files.append(link_file) 
Example 21
Project: leapp-repository   Author: oamg   File: mounting.py    Apache License 2.0    5 votes
def _makedirs(path, mode=0o777, exists_ok=True):
    """ Helper function which extends os.makedirs with exists_ok on all versions of python. """
    try:
        os.makedirs(path, mode=mode)
    except OSError:
        if not exists_ok or not os.path.isdir(path):
            raise 
Example 22
Project: leapp-repository   Author: oamg   File: mounting.py    Apache License 2.0    5 votes
def makedirs(self, path, mode=0o777, exists_ok=True):
        """
        Creates the whole path recursively for any missing part.
        """
        _makedirs(path=self.full_path(path), mode=mode, exists_ok=exists_ok) 
Example 23
Project: Coulomb   Author: DynamoDS   File: extract_sessions_buffered.py    MIT License    5 votes
def flush(outpath, session_maps):

    lns_count = 0
    for sessions_lst in session_maps.values():
        lns_count += len(sessions_lst)

    log ("Flushing lines/sessions: " + str(lns_count) + " / " + str(len(session_maps.keys())))

    # Verify that the folders exist
    for session in session_maps.keys():
        sessions_folder_path = join(outPath, session[0:3])
        if sessions_folder_path not in existing_sessions_path:
            if not os.path.exists(sessions_folder_path):
                os.makedirs(sessions_folder_path)
            existing_sessions_path.add(sessions_folder_path)

        sessionPath = join(sessions_folder_path, session + ".gz")
        if not os.path.exists(sessionPath):
            newSessionIDSet.add(session)
            

        o = gzip.open(sessionPath, 'a')
        # o = open(join(outPath, session + ".json"), 'a')
        for ln in session_maps[session]:
            o.write(ln)
        o.flush()

        f = open(completedInputListPath, 'a')
        for filePath in completedInputFiles_buffer:
            completedInputFiles.add(filePath)
            f.write(filePath + "\n")
        f.flush()
        completedInputFiles_buffer.clear()

    log ("Flushing complete. Total sessions:\t" + str(len(sessionIDSet)) + "\tTotal new sessions:\t" + str(len(newSessionIDSet))) 
Example 24
Project: mlbv   Author: kmac   File: request.py    GNU General Public License v3.0    5 votes
def _get_cachedir():
    cachedir = os.path.join(util.get_tempdir(), 'cache')
    if not os.path.exists(cachedir):
        LOG.debug('Creating cache directory: ' + cachedir)
    if not os.path.exists(cachedir):
        os.makedirs(cachedir)
    return cachedir 
Example 25
Project: mlbv   Author: kmac   File: config.py    GNU General Public License v3.0    5 votes
def generate_config(username=None, password=None, servicename="MLB.tv"):
        """Creates config file from template + user prompts."""
        script_name = os.path.splitext(os.path.basename(sys.argv[0]))[0]
        # use the script name minus any extension for the config directory
        config_dir = None
        config_dir = os.path.join(Config.config_dir_roots[1], script_name)
        if not os.path.exists(config_dir):
            print("Creating config directory: {}".format(config_dir))
            os.makedirs(config_dir)
        config_file = os.path.join(config_dir, 'config')
        if os.path.exists(config_file):
            print("Aborting: The config file already exists at '{}'".format(config_file))
            return False

        # copy the template config file
        print("Generating basic config file at: {}".format(config_dir))
        current_dir = os.path.dirname(inspect.getfile(inspect.currentframe()))
        template_config_path = os.path.abspath(os.path.join(current_dir, '../../..', 'config.template'))
        if not os.path.exists(template_config_path):
            print("Could not find template config file [expected at: {}]".format(template_config_path))
            return False

        if username is None:
            username = input('Enter {} username: '.format(servicename))
        if password is None:
            password = input('Enter {} password: '.format(servicename))

        with open(template_config_path, 'r') as infile, open(config_file, 'w') as outfile:
            for line in infile:
                if line.startswith('# username='):
                    outfile.write("username={}\n".format(username))
                elif line.startswith('# password='):
                    outfile.write("password={}\n".format(password))
                else:
                    outfile.write(line)
        print("Finished creating config file: {}".format(config_file))
        print("You may want to edit it now to set up favourites, etc.")
        return True 
Example 26
Project: godot-mono-builds   Author: godotengine   File: os_utils.py    MIT License    5 votes
def mkdir_p(path):
    if not os.path.exists(path):
        print('creating directory: ' + path)
        os.makedirs(path)


# Remove files and/or directories recursively 
Example 27
Project: chainer-openai-transformer-lm   Author: soskek   File: utils.py    MIT License    5 votes
def make_path(f):
    d = os.path.dirname(f)
    if d and not os.path.exists(d):
        os.makedirs(d)
    return f 
Example 28
Project: chainer-openai-transformer-lm   Author: soskek   File: train.py    MIT License    5 votes
def predict():
    filename = filenames[dataset]
    pred_fn = pred_fns[dataset]
    label_decoder = label_decoders[dataset]
    predictions = pred_fn(iter_predict(teX, teM))
    if label_decoder is not None:
        predictions = [label_decoder[prediction] for prediction in predictions]
    path = os.path.join(submission_dir, filename)
    os.makedirs(os.path.dirname(path), exist_ok=True)
    with open(path, 'w') as f:
        f.write('{}\t{}\n'.format('index', 'prediction'))
        for i, prediction in enumerate(predictions):
            f.write('{}\t{}\n'.format(i, prediction)) 
Example 29
Project: csv-to-calendar   Author: egeldenhuys   File: upload.py    GNU General Public License v3.0    5 votes
def get_credentials():
    """Gets valid user credentials from storage.

    If nothing has been stored, or if the stored credentials are invalid,
    the OAuth2 flow is completed to obtain the new credentials.

    Returns:
        Credentials, the obtained credential.
    """
    home_dir = os.path.expanduser('~')
    credential_dir = os.path.join(home_dir, '.credentials')
    if not os.path.exists(credential_dir):
        os.makedirs(credential_dir)
    credential_path = os.path.join(credential_dir,
                                   'calendar-python-quickstart.json')

    store = Storage(credential_path)
    credentials = store.get()
    if not credentials or credentials.invalid:
        flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)
        flow.user_agent = APPLICATION_NAME
        if flags:
            credentials = tools.run_flow(flow, store, flags)
        else:  # Needed only for compatibility with Python 2.6
            credentials = tools.run(flow, store)
        print('Storing credentials to ' + credential_path)
    return credentials 
Example 30
Project: malcode   Author: moonsea   File: zeustracker.py    GNU General Public License v3.0    5 votes
def GetFile(url, file):
    # url = ''.join(['http://vxheaven.org/dl/', file])

    if not os.path.exists('resource/zeustracker'):
        os.makedirs('resource/zeustracker')

    file_path = os.path.join('resource/zeustracker', file)

    print '[+] start downloading ', file
    urllib.urlretrieve(url, file_path) 
Example 31
Project: malcode   Author: moonsea   File: filetype.py    GNU General Public License v3.0    5 votes
def CopyFile(srcDir, desDir):
    if not os.path.exists(desDir):
        try:
            os.makedirs(desDir)
        except:
            print '[-] Mkdir error'

    print '[+]Copying ', srcDir

    try:
        shutil.copy(srcDir, desDir)
    except:
        print '[-]Copy error' 
Example 32
Project: malcode   Author: moonsea   File: filtersamemd5.py    GNU General Public License v3.0    5 votes
def checkDir(dir):
    if not os.path.exists(dir):
        try:
            os.makedirs(dir)
        except:
            print '[-] Mkdir error'
    print '[+]' + dir + ' is ok' 
Example 33
Project: malcode   Author: moonsea   File: vxheaven.py    GNU General Public License v3.0    5 votes
def GetFile(url, file):
    # url = ''.join(['http://vxheaven.org/dl/', file])

    if not os.path.exists('malcode/src'):
        os.makedirs('malcode/src')

    file_path = os.path.join('malcode', file)

    print '[+] start downloading ', file
    urllib.urlretrieve(url, file_path) 
Example 34
Project: malcode   Author: moonsea   File: wingenasm.py    GNU General Public License v3.0    5 votes
def log(action, content, prefix='[+]', suffix='', subpath=''):
    logDir = os.path.join(logPath, subpath)
    if not os.path.exists(logDir):
        try:
            os.makedirs(logDir)
        except:
            print '[-] Mkdir error'

    logpath = os.path.join(logDir, logName)

    with open(logpath, 'a+') as logfile:
        logfile.write(''.join([prefix, getNowTime(), ' ', action, ' ', content, suffix, '\n'])) 
Example 35
Project: malcode   Author: moonsea   File: getOpCode.py    GNU General Public License v3.0    5 votes
def checkDir(dirpath):
    if not os.path.exists(dirpath):
        try:
            os.makedirs(dirpath)
        except:
            print '[-] Mkdir error' 
Example 36
Project: malcode   Author: moonsea   File: virusgenasm.py    GNU General Public License v3.0    5 votes
def checkDir(dir):
    if not os.path.exists(dir):
        try:
            os.makedirs(dir)
        except:
            print '[-] Mkdir error'
    print '[+]' + dir + ' is ok' 
Example 37
Project: malcode   Author: moonsea   File: getOpCodeNdisasm.py    GNU General Public License v3.0    5 votes
def checkDir(dirpath):
    if not os.path.exists(dirpath):
        try:
            os.makedirs(dirpath)
        except:
            print '[-] Mkdir error' 
Example 38
Project: malcode   Author: moonsea   File: dosgenasm.py    GNU General Public License v3.0    5 votes
def log(action, content, prefix='[+]', suffix=''):
    if not os.path.exists(logPath):
        try:
            os.makedirs(logPath)
        except:
            print '[-] Mkdir error'

    log = ''.join([prefix, getNowTime(), ' ', action, ' ', content, suffix, '\n'])
    print log
	
    logpath = os.path.join(logPath, logName)

    with open(logpath, 'a+') as logfile:
        # logfile.write(''.join([prefix, getNowTime(), ' ', action, ' ', content, suffix, '\n']))
        logfile.write(log) 
Example 39
Project: malcode   Author: moonsea   File: benignasm.py    GNU General Public License v3.0    5 votes
def log(action, content, prefix='[+]', suffix='', subpath=''):
    logDir = os.path.join(logPath, subpath)
    if not os.path.exists(logDir):
        try:
            os.makedirs(logDir)
        except:
            print '[-] Mkdir error'

    logpath = os.path.join(logDir, logName)

    with open(logpath, 'a+') as logfile:
        logfile.write(''.join([prefix, getNowTime(), ' ', action, ' ', content, suffix, '\n'])) 
Example 40
Project: alfred-yubikey-otp   Author: robertoriv   File: workflow.py    MIT License    5 votes
def _create(self, dirpath):
        """Create directory `dirpath` if it doesn't exist.

        :param dirpath: path to directory
        :type dirpath: ``unicode``
        :returns: ``dirpath`` argument
        :rtype: ``unicode``

        """
        if not os.path.exists(dirpath):
            os.makedirs(dirpath)
        return dirpath 
Example 41
Project: pyblish-win   Author: pyblish   File: dist.py    GNU Lesser General Public License v3.0    5 votes
def bundle(src, dst):
    """Bundle files from `src` into `dst`

    Arguments:
        src (str): Source directory of files to bundle, e.g. /.
        dst (str): Output directory in which to copy files /build

    """

    print("Collecting files..")

    col = collect(src)

    print("Copying files into /build")
    for fname in col:
        out = os.path.join(dst, fname)

        try:
            os.makedirs(os.path.dirname(out))
        except WindowsError:
            pass

        shutil.copyfile(src=fname, dst=out)

    # Replace with a light-weight version
    shutil.copy(src=os.path.join(src, "icudt53.dll"),
                dst=os.path.join(dst, "lib", "python-qt5", "PyQt5"))

    print("Build finished successfully.")

    return dst 
Example 42
Project: pyblish-win   Author: pyblish   File: main.py    GNU Lesser General Public License v3.0    5 votes
def write_file(self, new_text, filename, old_text, encoding):
        orig_filename = filename
        if self._output_dir:
            if filename.startswith(self._input_base_dir):
                filename = os.path.join(self._output_dir,
                                        filename[len(self._input_base_dir):])
            else:
                raise ValueError('filename %s does not start with the '
                                 'input_base_dir %s' % (
                                         filename, self._input_base_dir))
        if self._append_suffix:
            filename += self._append_suffix
        if orig_filename != filename:
            output_dir = os.path.dirname(filename)
            if not os.path.isdir(output_dir):
                os.makedirs(output_dir)
            self.log_message('Writing converted %s to %s.', orig_filename,
                             filename)
        if not self.nobackups:
            # Make backup
            backup = filename + ".bak"
            if os.path.lexists(backup):
                try:
                    os.remove(backup)
                except os.error, err:
                    self.log_message("Can't remove backup %s", backup)
            try:
                os.rename(filename, backup)
            except os.error, err:
                self.log_message("Can't rename %s to %s", filename, backup)
        # Actually write the new file 
Example 43
Project: pyblish-win   Author: pyblish   File: install.py    GNU Lesser General Public License v3.0    5 votes
def create_home_path(self):
        """Create directories under ~
        """
        if not self.user:
            return
        home = convert_path(os.path.expanduser("~"))
        for name, path in self.config_vars.iteritems():
            if path.startswith(home) and not os.path.isdir(path):
                self.debug_print("os.makedirs('%s', 0700)" % path)
                os.makedirs(path, 0700)

    # -- Command execution methods ------------------------------------- 
Example 44
Project: pyblish-win   Author: pyblish   File: test_all.py    GNU Lesser General Public License v3.0    5 votes
def get_new_environment_path() :
    path=get_new_path("environment")
    import os
    try:
        os.makedirs(path,mode=0700)
    except os.error:
        test_support.rmtree(path)
        os.makedirs(path)
    return path 
Example 45
Project: pyblish-win   Author: pyblish   File: test_os.py    GNU Lesser General Public License v3.0    5 votes
def test_makedir(self):
        base = test_support.TESTFN
        path = os.path.join(base, 'dir1', 'dir2', 'dir3')
        os.makedirs(path)             # Should work
        path = os.path.join(base, 'dir1', 'dir2', 'dir3', 'dir4')
        os.makedirs(path)

        # Try paths with a '.' in them
        self.assertRaises(OSError, os.makedirs, os.curdir)
        path = os.path.join(base, 'dir1', 'dir2', 'dir3', 'dir4', 'dir5', os.curdir)
        os.makedirs(path)
        path = os.path.join(base, 'dir1', os.curdir, 'dir2', 'dir3', 'dir4',
                            'dir5', 'dir6')
        os.makedirs(path) 
Example 46
Project: pyblish-win   Author: pyblish   File: test_glob.py    GNU Lesser General Public License v3.0    5 votes
def mktemp(self, *parts):
        filename = self.norm(*parts)
        base, file = os.path.split(filename)
        if not os.path.exists(base):
            os.makedirs(base)
        f = open(filename, 'w')
        f.close() 
Example 47
Project: arm_now   Author: nongiach   File: download.py    MIT License    5 votes
def download_from_github(arch):
    templates = str(Path.home()) + "/.config/arm_now/templates/"
    os.makedirs(templates)
    filename = arch + ".tar.xz"
    URL = "https://github.com/nongiach/arm_now_templates/raw/master/"
    download(URL + filename, templates + filename, Config.DOWNLOAD_CACHE_DIR) 
Example 48
Project: arm_now   Author: nongiach   File: arm_now.py    MIT License    5 votes
def do_offline():
    URL = "https://github.com/nongiach/arm_now_templates/archive/master.zip"
    templates = str(Path.home()) + "/.config/arm_now/templates/"
    master_zip = str(Path.home()) + "/.config/arm_now/templates/master.zip"
    os.makedirs(templates)
    # download_from_github(arch)
    download(URL, master_zip, Config.DOWNLOAD_CACHE_DIR)
    os.chdir(templates)
    check_call("unzip master.zip", shell=True)
    check_call("mv arm_now_templates-master/* .", shell=True)
    check_call("rm -rf arm_now_templates-master/ README.md master.zip", shell=True) 
Example 49
Project: robust-atd   Author: shadowbq   File: __init__.py    MIT License    5 votes
def mkdir_p(self, path):
        try:
            os.makedirs(path)
            if self.options.verbosity:
                print ('mkdir_p %s' % path)
                sys.stdout.flush()
        except OSError as exc:  # Python >2.5
            if exc.errno == errno.EEXIST and os.path.isdir(path):
                pass
            else:
                raise 
Example 50
Project: wechat-alfred-workflow   Author: TKkk-iOSer   File: workflow.py    MIT License    5 votes
def _create(self, dirpath):
        """Create directory `dirpath` if it doesn't exist.

        :param dirpath: path to directory
        :type dirpath: ``unicode``
        :returns: ``dirpath`` argument
        :rtype: ``unicode``

        """
        if not os.path.exists(dirpath):
            os.makedirs(dirpath)
        return dirpath 
Example 51
Project: Collaborative-Learning-for-Weakly-Supervised-Object-Detection   Author: Sunarker   File: train_val.py    MIT License    5 votes
def __init__(self, network, imdb, roidb, valroidb, output_dir, tbdir, pretrained_model=None, wsddn_premodel=None):
    self.net = network
    self.imdb = imdb
    self.roidb = roidb
    self.valroidb = valroidb
    self.output_dir = output_dir
    self.tbdir = tbdir
    # Simply put '_val' at the end to save the summaries from the validation set
    self.tbvaldir = tbdir + '_val'
    if not os.path.exists(self.tbvaldir):
      os.makedirs(self.tbvaldir)
    self.pretrained_model = pretrained_model
    self.wsddn_premodel = wsddn_premodel 
Example 52
Project: Collaborative-Learning-for-Weakly-Supervised-Object-Detection   Author: Sunarker   File: train_val.py    MIT License    5 votes
def snapshot(self, iter):
    net = self.net

    if not os.path.exists(self.output_dir):
      os.makedirs(self.output_dir)

    # Store the model snapshot
    filename = cfg.TRAIN.SNAPSHOT_PREFIX + '_iter_{:d}'.format(iter) + '.pth'
    filename = os.path.join(self.output_dir, filename)
    torch.save(self.net.state_dict(), filename)
    print('Wrote snapshot to: {:s}'.format(filename))
    
    
    if iter % 10000 == 0:
        shutil.copyfile(filename, filename + '.{:d}_cache'.format(iter))
    
    # Also store some meta information, random state, etc.
    nfilename = cfg.TRAIN.SNAPSHOT_PREFIX + '_iter_{:d}'.format(iter) + '.pkl'
    nfilename = os.path.join(self.output_dir, nfilename)
    # current state of numpy random
    st0 = np.random.get_state()
    # current position in the database
    cur = self.data_layer._cur
    # current shuffled indexes of the database
    perm = self.data_layer._perm
    # current position in the validation database
    cur_val = self.data_layer_val._cur
    # current shuffled indexes of the validation database
    perm_val = self.data_layer_val._perm

    # Dump the meta info
    with open(nfilename, 'wb') as fid:
      pickle.dump(st0, fid, pickle.HIGHEST_PROTOCOL)
      pickle.dump(cur, fid, pickle.HIGHEST_PROTOCOL)
      pickle.dump(perm, fid, pickle.HIGHEST_PROTOCOL)
      pickle.dump(cur_val, fid, pickle.HIGHEST_PROTOCOL)
      pickle.dump(perm_val, fid, pickle.HIGHEST_PROTOCOL)
      pickle.dump(iter, fid, pickle.HIGHEST_PROTOCOL)

    return filename, nfilename 
Example 53
Project: Collaborative-Learning-for-Weakly-Supervised-Object-Detection   Author: Sunarker   File: config.py    MIT License    5 votes
def get_output_dir(imdb, weights_filename):
  """Return the directory where experimental artifacts are placed.
  If the directory does not exist, it is created.

  A canonical path is built using the name from an imdb and a network
  (if not None).
  """
  outdir = osp.abspath(osp.join(__C.ROOT_DIR, 'output', __C.EXP_DIR, imdb.name))
  if weights_filename is None:
    weights_filename = 'default'
  outdir = osp.join(outdir, weights_filename)
  if not os.path.exists(outdir):
    os.makedirs(outdir)
  return outdir 
Example 54
Project: Collaborative-Learning-for-Weakly-Supervised-Object-Detection   Author: Sunarker   File: config.py    MIT License    5 votes
def get_output_tb_dir(imdb, weights_filename):
  """Return the directory where tensorflow summaries are placed.
  If the directory does not exist, it is created.

  A canonical path is built using the name from an imdb and a network
  (if not None).
  """
  outdir = osp.abspath(osp.join(__C.ROOT_DIR, 'tensorboard', __C.EXP_DIR, imdb.name))
  if weights_filename is None:
    weights_filename = 'default'
  outdir = osp.join(outdir, weights_filename)
  if not os.path.exists(outdir):
    os.makedirs(outdir)
  return outdir 
Example 55
Project: Collaborative-Learning-for-Weakly-Supervised-Object-Detection   Author: Sunarker   File: imdb.py    MIT License    5 votes
def cache_path(self):
    cache_path = osp.abspath(osp.join(cfg.DATA_DIR, 'cache'))
    if not os.path.exists(cache_path):
      os.makedirs(cache_path)
    return cache_path 
Example 56
Project: Flask-Python-GAE-Login-Registration   Author: orymeyer   File: cache.py    Apache License 2.0    5 votes
def __init__(self, cache_dir, threshold=500, default_timeout=300, mode=0o600):
        BaseCache.__init__(self, default_timeout)
        self._path = cache_dir
        self._threshold = threshold
        self._mode = mode

        try:
            os.makedirs(self._path)
        except OSError as ex:
            if ex.errno != errno.EEXIST:
                raise 
Example 57
Project: Flask-Python-GAE-Login-Registration   Author: orymeyer   File: cache.py    Apache License 2.0    5 votes
def __init__(self, cache_dir, threshold=500, default_timeout=300, mode=0o600):
        BaseCache.__init__(self, default_timeout)
        self._path = cache_dir
        self._threshold = threshold
        self._mode = mode

        try:
            os.makedirs(self._path)
        except OSError as ex:
            if ex.errno != errno.EEXIST:
                raise 
Example 58
Project: convseg   Author: chqiwang   File: cws.py    MIT License    5 votes
def evaluator(data, output_dir, output_flag):
    """
    Evaluate precision, recall and F1.
    """
    seqs, gold_stags, pred_stags = data
    assert len(seqs) == len(gold_stags) == len(pred_stags)
    # Create and open temp files.
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    ref_path = os.path.join(output_dir, '%s.ref' % output_flag)
    pred_path = os.path.join(output_dir, '%s.pred' % output_flag)
    score_path = os.path.join(output_dir, '%s.score' % output_flag)
    # Empty words file.
    temp_path = os.path.join(output_dir, '%s.temp' % output_flag)

    ref_file = codecs.open(ref_path, 'w', 'utf8')
    pred_file = codecs.open(pred_path, 'w', 'utf8')
    for l in create_output(seqs, gold_stags):
        print(l, file=ref_file)
    for i, l in enumerate(create_output(seqs, pred_stags)):
        print(l, file=pred_file)
    ref_file.close()
    pred_file.close()

    os.system('echo > %s' % temp_path)
    os.system('%s  %s %s %s > %s' % ('./score.perl', temp_path, ref_path, pred_path, score_path))
    # Sighan evaluation results
    os.system('tail -n 7 %s > %s' % (score_path, temp_path))
    eval_lines = [l.rstrip() for l in codecs.open(temp_path, 'r', 'utf8')]
    # Remove temp files.
    os.remove(ref_path)
    os.remove(pred_path)
    os.remove(score_path)
    os.remove(temp_path)
    # Precision, Recall and F1 score
    return (float(eval_lines[1].split(':')[1]),
            float(eval_lines[0].split(':')[1]),
            float(eval_lines[2].split(':')[1])) 
Example 59
Project: wikilinks   Author: trovdimi   File: decorators.py    MIT License    5 votes
def save(self):
        if not self.cache:
            return
        if not self.modified:
            # no need to save unmodified cache
            return
        if not os.path.exists(CACHE_FOLDER):
            os.makedirs(CACHE_FOLDER)
        with open(self.filepath, 'wb') as outfile:
            pickle.dump(self.cache, outfile, -1) 
Example 60
Project: mietmap-scraper   Author: CodeforKarlsruhe   File: scrape.py    MIT License    5 votes
def mkdirs(path):
    """
    Recursively create directories.

    Like ``os.makedirs``, but does not raise an error if the directory
    already exists.
    """
    try:
        os.makedirs(path)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise 
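
On Python 3.2 and later, the same behaviour is normally written with the exist_ok flag rather than by catching the error. A rough equivalent of the helper above (one subtle difference: exist_ok=True still raises FileExistsError if the path exists but is not a directory, whereas the errno check above also swallows that case):

import os

def mkdirs(path):
    """Create path recursively; do nothing if it already exists as a directory."""
    os.makedirs(path, exist_ok=True)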
Example 61
Project: AutoDL   Author: tanguofu   File: setup.py    BSD 3-Clause "New" or "Revised" License    5 votes
def test_compile(build_ext,
                 name,
                 code,
                 libraries=None,
                 include_dirs=None,
                 library_dirs=None,
                 macros=None,
                 extra_preargs=None):
    #test_compile_dir = os.path.join(build_ext.build_temp, 'test_compile')
    #
    test_compile_dir = build_ext.build_temp + "/test"
    if not os.path.exists(test_compile_dir):
        os.makedirs(test_compile_dir)

    source_file = os.path.join(test_compile_dir, '%s.cc' % name)
    with open(source_file, 'w') as f:
        f.write(code)

    compiler = build_ext.compiler
    suffix = sysconfig.get_config_var("EXT_SUFFIX")

    [object_file] = compiler.object_filenames([source_file])

    #shared_object_file = compiler.shared_object_filename(
    #    name+suffix, output_dir=test_compile_dir)
    shared_object_file = test_compile_dir + '/' + name + suffix

    compiler.compile(
        [source_file],
        extra_preargs=extra_preargs,
        include_dirs=include_dirs,
        macros=macros)
    compiler.link_shared_object(
        [object_file],
        shared_object_file,
        libraries=libraries,
        library_dirs=library_dirs)

    return shared_object_file 
Example 62
Project: AutoDL   Author: tanguofu   File: setup.py    BSD 3-Clause "New" or "Revised" License    5 votes
def test_compile_run(build_ext,
                     name,
                     code_file,
                     libraries=None,
                     include_dirs=None,
                     library_dirs=None,
                     macros=None,
                     extra_preargs=None):

    test_compile_dir = os.path.join(build_ext.build_temp, 'test_compile')
    if not os.path.exists(test_compile_dir):
        os.makedirs(test_compile_dir)

    source_file = code_file

    compiler = build_ext.compiler

    [object_file] = compiler.object_filenames(
        [source_file], output_dir=test_compile_dir)
    print(object_file)

    execute_object_file = compiler.executable_filename(
        name, output_dir=test_compile_dir)

    compiler.compile(
        [source_file],
        output_dir=test_compile_dir,
        extra_preargs=extra_preargs,
        include_dirs=include_dirs,
        macros=macros)
    compiler.link_executable(
        [object_file],
        execute_object_file,
        libraries=libraries,
        library_dirs=library_dirs)

    return execute_object_file 
Example 63
Project: slidoc   Author: mitotic   File: sdproxy.py    BSD 3-Clause "New" or "Revised" License    5 votes
def backupSheets(dirpath):
    # Returns null string on success or error string list
    # (synchronous)
    if Global.previewStatus:
        return [ 'Cannot backup when previewing session '+Global.previewStatus['sessionName']+' in site '+Settings['site_name'] ]

    suspend_cache('backup')
    if Settings['debug']:
        print("DEBUG:backupSheets: %s started %s" % (dirpath, datetime.datetime.now()), file=sys.stderr)
    errorList = []
    try:
        if not os.path.exists(dirpath):
            os.makedirs(dirpath)

        sessionAttributes = None
        for sheetName in BACKUP_SHEETS:
            rows = backupSheet(sheetName, dirpath, errorList, optional=True)
            if sheetName == INDEX_SHEET and rows and 'id' in rows[0]:
                try:
                    idCol = rows[0].index('id')
                    attributesCol = rows[0].index('attributes')
                    sessionAttributes = [(row[idCol], json.loads(row[attributesCol])) for row in rows[1:]]
                except Exception, excp:
                    errorList.append('Error: Session attributes not loadable %s' % excp)

        if sessionAttributes is None and not errorList:
            errorList.append('Error: Session attributes not found in index sheet %s' % INDEX_SHEET)

        for name, attributes in (sessionAttributes or []):
            backupSheet(name, dirpath, errorList)
            if attributes.get('discussSlides'):
                backupSheet(name+'_discuss', dirpath, errorList, optional=True) 
Example 64
Project: Traffic_sign_detection_YOLO   Author: AmeyaWagh   File: cli.py    MIT License    5 votes
def cliHandler(args):
    FLAGS = argHandler()
    FLAGS.setDefaults()
    FLAGS.parseArgs(args)

    # make sure all necessary dirs exist
    def _get_dir(dirs):
        for d in dirs:
            this = os.path.abspath(os.path.join(os.path.curdir, d))
            if not os.path.exists(this): os.makedirs(this)
    
    requiredDirectories = [FLAGS.imgdir, FLAGS.binary, FLAGS.backup, os.path.join(FLAGS.imgdir,'out')]
    if FLAGS.summary:
        requiredDirectories.append(FLAGS.summary)

    _get_dir(requiredDirectories)

    # fix FLAGS.load to appropriate type
    try: FLAGS.load = int(FLAGS.load)
    except: pass

    tfnet = TFNet(FLAGS)
    
    if FLAGS.demo:
        tfnet.camera()
        exit('Demo stopped, exit.')

    if FLAGS.train:
        print('Enter training ...'); tfnet.train()
        if not FLAGS.savepb: 
            exit('Training finished, exit.')

    if FLAGS.savepb:
        print('Rebuild a constant version ...')
        tfnet.savepb(); exit('Done')

    tfnet.predict() 
Example 65
Project: Traffic_sign_detection_YOLO   Author: AmeyaWagh   File: test_darkflow.py    MIT License    5 votes
def download_file(url, savePath):
    fileName = savePath.split("/")[-1]
    if not os.path.isfile(savePath):
        os.makedirs(os.path.dirname(savePath), exist_ok=True)  # Make directories necessary for the file in case they don't exist
        print("Downloading " + fileName + " file...")
        r = requests.get(url, stream=True)
        with open(savePath, 'wb') as f:
            for chunk in r.iter_content(chunk_size=1024): 
                if chunk: # filter out keep-alive new chunks
                    f.write(chunk)
        r.close()
    else:
        print("Found existing " + fileName + " file.") 
Example 66
Project: ieml   Author: IEMLdev   File: ieml_database.py    GNU General Public License v3.0    5 votes
def __init__(self, folder,
                 cache_folder=None,
                 use_cache=True):
        self.folder = folder

        self.use_cache = use_cache
        self.cache_folder = cache_folder
        if self.use_cache:
            if cache_folder is None:
                self.cache_folder = self.folder
            elif not os.path.isdir(self.cache_folder):
                os.makedirs(self.cache_folder)
                # raise ValueError("Folder '{}' does not exists.".format(self.cache_folder))
        else:
            self.cache_folder = None 
Example 67
Project: ieml   Author: IEMLdev   File: ieml_database.py    GNU General Public License v3.0    5 votes
def path_of(self, _ieml, descriptor=True, mkdir=False, normalize=True):
        if isinstance(_ieml, str):
            ieml = IEMLParser().parse(_ieml)
        else:
            ieml = _ieml

        if descriptor:
            ext = '.desc'
        else:
            ext = '.ieml'

        class_folder, prefix_sixe = self.CLASS_TO_FOLDER[ieml.__class__]


        if normalize:
            filename = self.filename_of(ieml)
        else:
            filename = self.filename_of(_ieml)

        prefix = filename[:prefix_sixe]

        p = os.path.join(self.folder, class_folder,
                         'singular' if len(ieml) == 1 else 'paradigm', prefix)
        if mkdir:
            os.makedirs(p, exist_ok=True)

        return os.path.join(p, filename + ext) 
Example 68
Project: pydockenv   Author: se7entyse7en   File: base.py    Apache License 2.0    5 votes
def setUp(self):
        self._cwd = os.getcwd()
        self._test_dir = Path(definitions.ROOT_DIR, '.test-dir')
        self._projs_dir = Path(str(self._test_dir), 'projs')

        self._commander = Commander()

        self._env_index = 1
        os.makedirs(str(self._projs_dir)) 
Example 69
Project: fs_image   Author: facebookincubator   File: temp_repos.py    MIT License    4 votes
def make_repo_steps(
    out_dir: Path, repo_change_steps: List[Dict[str, Repo]], arch: str,
    avoid_symlinks: bool = False,
):
    # When an RPM occurs in two different repos, we want it to be
    # bit-identical (otherwise, the snapshot would see a `mutable_rpm`
    # error).  This means never rebuilding an RPM that was previously seen.
    # The paths are relative to `out_dir`.
    rpm_to_path = {}
    # The repos that exist at the current step.
    repos = {}
    for step, repo_changes in enumerate(repo_change_steps):
        step = Path(str(step))
        for repo_name, repo in repo_changes.items():
            if repo is None:
                del repos[repo_name]
            else:
                repos[repo_name] = repo
        step_dir = out_dir / step
        os.makedirs(step_dir)
        yum_dnf_conf = ConfigParser()
        yum_dnf_conf['main'] = {}
        for repo_name, repo in repos.items():
            repo_dir = step_dir / repo_name
            yum_dnf_conf[repo_name] = {'baseurl': repo_dir.file_url()}
            if isinstance(repo, str):  # Alias of another repo
                assert repo in repos
                if avoid_symlinks:
                    shutil.copytree(step_dir / repo, repo_dir)
                else:
                    os.symlink(repo, repo_dir)
                continue
            # Each repo's package dir is different to exercise the fact
            # that the same file's location may differ across repos.
            package_dir = repo_dir / f'{repo_name}-pkgs'
            os.makedirs(package_dir)
            for rpm in repo.rpms:
                prev_path = rpm_to_path.get(rpm)
                if prev_path and avoid_symlinks:
                    shutil.copy(
                        out_dir / prev_path,
                        package_dir / prev_path.basename(),
                    )
                elif prev_path:
                    os.symlink(
                        '../../..' / prev_path,
                        package_dir / prev_path.basename(),
                    )
                else:
                    rpm_to_path[rpm] = (
                        step / repo_name / package_dir.basename() /
                        build_rpm(package_dir, arch, rpm)
                    )
            # Now that all RPMs were built, we can generate the Yum metadata
            subprocess.run(['createrepo_c', repo_dir], check=True)
        for prog_name in ['dnf', 'yum']:
            with open(step_dir / f'{prog_name}.conf', 'w') as out_f:
                yum_dnf_conf.write(out_f) 
Example 70
Project: alfred-yubikey-otp   Author: robertoriv   File: notify.py    MIT License    4 votes
def png_to_icns(png_path, icns_path):
    """Convert PNG file to ICNS using ``iconutil``.

    Create an iconset from the source PNG file. Generate PNG files
    in each size required by macOS, then call ``iconutil`` to turn
    them into a single ICNS file.

    Args:
        png_path (str): Path to source PNG file.
        icns_path (str): Path to destination ICNS file.

    Raises:
        RuntimeError: Raised if ``iconutil`` or ``sips`` fail.
    """
    tempdir = tempfile.mkdtemp(prefix='aw-', dir=wf().datadir)

    try:
        iconset = os.path.join(tempdir, 'Icon.iconset')

        assert not os.path.exists(iconset), \
            'iconset already exists: ' + iconset
        os.makedirs(iconset)

        # Copy source icon to icon set and generate all the other
        # sizes needed
        configs = []
        for i in (16, 32, 128, 256, 512):
            configs.append(('icon_{0}x{0}.png'.format(i), i))
            configs.append((('icon_{0}x{0}@2x.png'.format(i), i * 2)))

        shutil.copy(png_path, os.path.join(iconset, 'icon_256x256.png'))
        shutil.copy(png_path, os.path.join(iconset, 'icon_128x128@2x.png'))

        for name, size in configs:
            outpath = os.path.join(iconset, name)
            if os.path.exists(outpath):
                continue
            convert_image(png_path, outpath, size)

        cmd = [
            b'iconutil',
            b'-c', b'icns',
            b'-o', icns_path,
            iconset]

        retcode = subprocess.call(cmd)
        if retcode != 0:
            raise RuntimeError('iconutil exited with %d' % retcode)

        assert os.path.exists(icns_path), \
            'generated ICNS file not found: ' + repr(icns_path)
    finally:
        try:
            shutil.rmtree(tempdir)
        except OSError:  # pragma: no cover
            pass 
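For context, iconutil operates on an .iconset directory, which is why os.makedirs() is called inside the freshly created temp dir before any icons are written. A stripped-down sketch of the same flow, assuming a macOS machine with sips and iconutil on the PATH; this is not the workflow library's API and the helper name is mine.

import os
import shutil
import subprocess
import tempfile

def build_icns(png_path, icns_path):
    # Minimal sketch (macOS only): create the .iconset directory with
    # os.makedirs, fill it with resized PNGs, then let `iconutil`
    # assemble the .icns file.
    tempdir = tempfile.mkdtemp(prefix='iconset-')
    try:
        iconset = os.path.join(tempdir, 'Icon.iconset')
        os.makedirs(iconset)  # iconutil needs the directory to exist
        for size in (16, 32, 128, 256, 512):
            out = os.path.join(iconset, 'icon_{0}x{0}.png'.format(size))
            # `sips -z height width` resizes the source PNG.
            subprocess.check_call(
                ['sips', '-z', str(size), str(size), png_path, '--out', out])
        subprocess.check_call(
            ['iconutil', '-c', 'icns', '-o', icns_path, iconset])
    finally:
        shutil.rmtree(tempdir, ignore_errors=True)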
Example 71
Project: pyblish-win   Author: pyblish   File: shutil.py    GNU Lesser General Public License v3.0 4 votes vote down vote up
def copytree(src, dst, symlinks=False, ignore=None):
    """Recursively copy a directory tree using copy2().

    The destination directory must not already exist.
    If exception(s) occur, an Error is raised with a list of reasons.

    If the optional symlinks flag is true, symbolic links in the
    source tree result in symbolic links in the destination tree; if
    it is false, the contents of the files pointed to by symbolic
    links are copied.

    The optional ignore argument is a callable. If given, it
    is called with the `src` parameter, which is the directory
    being visited by copytree(), and `names` which is the list of
    `src` contents, as returned by os.listdir():

        callable(src, names) -> ignored_names

    Since copytree() is called recursively, the callable will be
    called once for each directory that is copied. It returns a
    list of names relative to the `src` directory that should
    not be copied.

    XXX Consider this example code rather than the ultimate tool.

    """
    names = os.listdir(src)
    if ignore is not None:
        ignored_names = ignore(src, names)
    else:
        ignored_names = set()

    os.makedirs(dst)
    errors = []
    for name in names:
        if name in ignored_names:
            continue
        srcname = os.path.join(src, name)
        dstname = os.path.join(dst, name)
        try:
            if symlinks and os.path.islink(srcname):
                linkto = os.readlink(srcname)
                os.symlink(linkto, dstname)
            elif os.path.isdir(srcname):
                copytree(srcname, dstname, symlinks, ignore)
            else:
                # Will raise a SpecialFileError for unsupported file types
                copy2(srcname, dstname)
        # catch the Error from the recursive copytree so that we can
        # continue with other files
        except Error, err:
            errors.extend(err.args[0])
        except EnvironmentError, why:
            errors.append((srcname, dstname, str(why)))
    try:
        copystat(src, dst)
    except OSError, why:
        if WindowsError is not None and isinstance(why, WindowsError):
            # Copying file access times may fail on Windows
            pass
        else:
            errors.append((src, dst, str(why)))
    if errors:
        raise Error, errors
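Usage sketch for the ignore callable described in the docstring: shutil.ignore_patterns() builds exactly the (src, names) -> ignored_names callable it asks for, and copytree() creates dst itself via os.makedirs(), so the destination must not exist beforehand. The paths are placeholders.

import shutil

# copytree() creates 'build/assets' with os.makedirs(), so it must not
# already exist; *.pyc files and tmp* entries are skipped in every
# directory that is visited.
shutil.copytree('project/assets', 'build/assets',
                ignore=shutil.ignore_patterns('*.pyc', 'tmp*'))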
Example 72
Project: pyblish-win   Author: pyblish   File: shutil.py    GNU Lesser General Public License v3.0 4 votes vote down vote up
def _make_tarball(base_name, base_dir, compress="gzip", verbose=0, dry_run=0,
                  owner=None, group=None, logger=None):
    """Create a (possibly compressed) tar file from all the files under
    'base_dir'.

    'compress' must be "gzip" (the default), "bzip2", or None.

    'owner' and 'group' can be used to define an owner and a group for the
    archive that is being built. If not provided, the current owner and group
    will be used.

    The output tar file will be named 'base_name' +  ".tar", possibly plus
    the appropriate compression extension (".gz", or ".bz2").

    Returns the output filename.
    """
    tar_compression = {'gzip': 'gz', 'bzip2': 'bz2', None: ''}
    compress_ext = {'gzip': '.gz', 'bzip2': '.bz2'}

    # flags for compression program, each element of list will be an argument
    if compress is not None and compress not in compress_ext.keys():
        raise ValueError, \
              ("bad value for 'compress': must be None, 'gzip' or 'bzip2'")

    archive_name = base_name + '.tar' + compress_ext.get(compress, '')
    archive_dir = os.path.dirname(archive_name)

    if archive_dir and not os.path.exists(archive_dir):
        if logger is not None:
            logger.info("creating %s", archive_dir)
        if not dry_run:
            os.makedirs(archive_dir)


    # creating the tarball
    import tarfile  # late import so Python build itself doesn't break

    if logger is not None:
        logger.info('Creating tar archive')

    uid = _get_uid(owner)
    gid = _get_gid(group)

    def _set_uid_gid(tarinfo):
        if gid is not None:
            tarinfo.gid = gid
            tarinfo.gname = group
        if uid is not None:
            tarinfo.uid = uid
            tarinfo.uname = owner
        return tarinfo

    if not dry_run:
        tar = tarfile.open(archive_name, 'w|%s' % tar_compression[compress])
        try:
            tar.add(base_dir, filter=_set_uid_gid)
        finally:
            tar.close()

    return archive_name 
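The makedirs-then-archive pattern above is easy to reproduce with the tarfile module directly; a small sketch with illustrative paths and a function name of my own choosing:

import os
import tarfile

def tar_directory(archive_path, source_dir):
    # Create the archive's parent directory first (os.makedirs), then
    # stream `source_dir` into a gzipped tar file.
    archive_dir = os.path.dirname(archive_path)
    if archive_dir and not os.path.exists(archive_dir):
        os.makedirs(archive_dir)
    with tarfile.open(archive_path, 'w:gz') as tar:
        tar.add(source_dir, arcname=os.path.basename(source_dir))
    return archive_path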
Example 73
Project: pyblish-win   Author: pyblish   File: shutil.py    GNU Lesser General Public License v3.0 4 votes vote down vote up
def _make_zipfile(base_name, base_dir, verbose=0, dry_run=0, logger=None):
    """Create a zip file from all the files under 'base_dir'.

    The output zip file will be named 'base_name' + ".zip".  Uses either the
    "zipfile" Python module (if available) or the InfoZIP "zip" utility
    (if installed and found on the default search path).  If neither tool is
    available, raises ExecError.  Returns the name of the output zip
    file.
    """
    zip_filename = base_name + ".zip"
    archive_dir = os.path.dirname(base_name)

    if archive_dir and not os.path.exists(archive_dir):
        if logger is not None:
            logger.info("creating %s", archive_dir)
        if not dry_run:
            os.makedirs(archive_dir)

    # If zipfile module is not available, try spawning an external 'zip'
    # command.
    try:
        import zipfile
    except ImportError:
        zipfile = None

    if zipfile is None:
        _call_external_zip(base_dir, zip_filename, verbose, dry_run)
    else:
        if logger is not None:
            logger.info("creating '%s' and adding '%s' to it",
                        zip_filename, base_dir)

        if not dry_run:
            with zipfile.ZipFile(zip_filename, "w",
                                 compression=zipfile.ZIP_DEFLATED) as zf:
                for dirpath, dirnames, filenames in os.walk(base_dir):
                    for name in filenames:
                        path = os.path.normpath(os.path.join(dirpath, name))
                        if os.path.isfile(path):
                            zf.write(path, path)
                            if logger is not None:
                                logger.info("adding '%s'", path)

    return zip_filename 
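Both of these private helpers sit behind the public shutil.make_archive(), which, as the code above shows, creates the missing output directory for you unless dry_run is set. A one-line usage sketch with placeholder paths:

import shutil

# 'dist/releases' is created before the zip is written; the return value
# is the path of the finished archive ('dist/releases/app-1.0.zip').
archive_path = shutil.make_archive('dist/releases/app-1.0', 'zip', root_dir='app')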
Example 74
Project: pyblish-win   Author: pyblish   File: tarfile.py    GNU Lesser General Public License v3.0 4 votes vote down vote up
def _extract_member(self, tarinfo, targetpath):
        """Extract the TarInfo object tarinfo to a physical
           file called targetpath.
        """
        # Fetch the TarInfo object for the given name
        # and build the destination pathname, replacing
        # forward slashes to platform specific separators.
        targetpath = targetpath.rstrip("/")
        targetpath = targetpath.replace("/", os.sep)

        # Create all upper directories.
        upperdirs = os.path.dirname(targetpath)
        if upperdirs and not os.path.exists(upperdirs):
            # Create directories that are not part of the archive with
            # default permissions.
            os.makedirs(upperdirs)

        if tarinfo.islnk() or tarinfo.issym():
            self._dbg(1, "%s -> %s" % (tarinfo.name, tarinfo.linkname))
        else:
            self._dbg(1, tarinfo.name)

        if tarinfo.isreg():
            self.makefile(tarinfo, targetpath)
        elif tarinfo.isdir():
            self.makedir(tarinfo, targetpath)
        elif tarinfo.isfifo():
            self.makefifo(tarinfo, targetpath)
        elif tarinfo.ischr() or tarinfo.isblk():
            self.makedev(tarinfo, targetpath)
        elif tarinfo.islnk() or tarinfo.issym():
            self.makelink(tarinfo, targetpath)
        elif tarinfo.type not in SUPPORTED_TYPES:
            self.makeunknown(tarinfo, targetpath)
        else:
            self.makefile(tarinfo, targetpath)

        self.chown(tarinfo, targetpath)
        if not tarinfo.issym():
            self.chmod(tarinfo, targetpath)
            self.utime(tarinfo, targetpath)

    #--------------------------------------------------------------------------
    # Below are the different file methods. They are called via
    # _extract_member() when extract() is called. They can be replaced in a
    # subclass to implement other functionality. 
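The "create all upper directories" step is the heart of this example: the parent chain of the extraction target is built with os.makedirs() before the member itself is written. A tiny standalone sketch of that step, using exist_ok=True (Python 3.2+) to avoid the exists-check race; the helper name is hypothetical.

import os

def ensure_parent(targetpath):
    # Make the chain of parent directories for an extraction target;
    # exist_ok=True tolerates directories that already exist.
    parent = os.path.dirname(targetpath)
    if parent:
        os.makedirs(parent, exist_ok=True)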
Example 75
Project: pyblish-win   Author: pyblish   File: zipfile.py    GNU Lesser General Public License v3.0 4 votes vote down vote up
def _extract_member(self, member, targetpath, pwd):
        """Extract the ZipInfo object 'member' to a physical
           file on the path targetpath.
        """
        # build the destination pathname, replacing
        # forward slashes to platform specific separators.
        arcname = member.filename.replace('/', os.path.sep)

        if os.path.altsep:
            arcname = arcname.replace(os.path.altsep, os.path.sep)
        # interpret absolute pathname as relative, remove drive letter or
        # UNC path, redundant separators, "." and ".." components.
        arcname = os.path.splitdrive(arcname)[1]
        arcname = os.path.sep.join(x for x in arcname.split(os.path.sep)
                    if x not in ('', os.path.curdir, os.path.pardir))
        if os.path.sep == '\\':
            # filter illegal characters on Windows
            illegal = ':<>|"?*'
            if isinstance(arcname, unicode):
                table = {ord(c): ord('_') for c in illegal}
            else:
                table = string.maketrans(illegal, '_' * len(illegal))
            arcname = arcname.translate(table)
            # remove trailing dots
            arcname = (x.rstrip('.') for x in arcname.split(os.path.sep))
            arcname = os.path.sep.join(x for x in arcname if x)

        targetpath = os.path.join(targetpath, arcname)
        targetpath = os.path.normpath(targetpath)

        # Create all upper directories if necessary.
        upperdirs = os.path.dirname(targetpath)
        if upperdirs and not os.path.exists(upperdirs):
            os.makedirs(upperdirs)

        if member.filename[-1] == '/':
            if not os.path.isdir(targetpath):
                os.mkdir(targetpath)
            return targetpath

        with self.open(member, pwd=pwd) as source, \
             file(targetpath, "wb") as target:
            shutil.copyfileobj(source, target)

        return targetpath 
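The interesting part here is that the archive name is sanitized (drive letters, absolute paths, and ".." components are stripped) before os.makedirs() is called, so a crafted member cannot escape the target directory. A self-contained Python 3 sketch of that check, with an illustrative helper name:

import os
import zipfile

def safe_extract(zip_path, dest_dir):
    # Refuse members whose resolved path would escape dest_dir, then
    # create parent directories with os.makedirs() before extracting.
    dest_dir = os.path.abspath(dest_dir)
    with zipfile.ZipFile(zip_path) as zf:
        for member in zf.namelist():
            target = os.path.abspath(os.path.join(dest_dir, member))
            if not target.startswith(dest_dir + os.sep):
                raise ValueError('unsafe member path: %r' % member)
            parent = os.path.dirname(target)
            if parent and not os.path.isdir(parent):
                os.makedirs(parent)
            if not member.endswith('/'):  # skip directory entries
                with zf.open(member) as src, open(target, 'wb') as out:
                    out.write(src.read())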
Example 76
Project: aospy   Author: spencerahill   File: calc.py    Apache License 2.0 4 votes vote down vote up
def _write_to_tar(self, dtype_out_time):
        """Add the data to the tar file in tar_out_direc."""
        # When submitted in parallel and the directory does not exist yet
        # multiple processes may try to create a new directory; this leads
        # to an OSError for all processes that tried to make the
        # directory, but were later than the first.
        try:
            os.makedirs(self.dir_tar_out)
        except OSError:
            pass
        # tarfile 'append' mode won't overwrite the old file, which we want.
        # So open in 'read' mode, extract the file, and then delete it.
        # But 'read' mode throws OSError if file doesn't exist: make it first.
        utils.io.dmget([self.path_tar_out])
        with tarfile.open(self.path_tar_out, 'a') as tar:
            pass
        with tarfile.open(self.path_tar_out, 'r') as tar:
            old_data_path = os.path.join(self.dir_tar_out,
                                         self.file_name[dtype_out_time])
            try:
                tar.extract(self.file_name[dtype_out_time],
                            path=old_data_path)
            except KeyError:
                pass
            else:
                # tar.extract() wrote the member into a directory tree,
                # so os.remove/os.rmdir won't work; use shutil.rmtree.
                shutil.rmtree(old_data_path)
                retcode = subprocess.call([
                    "tar", "--delete", "--file={}".format(self.path_tar_out),
                    self.file_name[dtype_out_time]
                ])
                if retcode:
                    msg = ("The 'tar' command to save your aospy output "
                           "exited with an error.  Most likely, this is due "
                           "to using an old version of 'tar' (especially if "
                           "you are on a Mac).  Consider installing a newer "
                           "version of 'tar' or disabling tar output by "
                           "setting `write_to_tar=False` in the "
                           "`calc_exec_options` argument of "
                           "`submit_mult_calcs`.")
                    logging.warn(msg)
        with tarfile.open(self.path_tar_out, 'a') as tar:
            tar.add(self.path_out[dtype_out_time],
                    arcname=self.file_name[dtype_out_time]) 
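The bare try/except around os.makedirs() exists purely for the parallel-creation race described in the comment. A sketch of the same guard that only swallows "already exists" errors rather than every OSError; on Python 3.2+ the whole function collapses to os.makedirs(path, exist_ok=True).

import errno
import os

def makedirs_racefree(path):
    # Tolerate a concurrent process creating the same directory, but
    # re-raise any other failure (permissions, read-only filesystem, ...).
    try:
        os.makedirs(path)
    except OSError as exc:
        if exc.errno != errno.EEXIST:
            raise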
Example 77
Project: Att-ChemdNER   Author: lingluodlut   File: model.py    Apache License 2.0 4 votes vote down vote up
def __init__(self, parameters=None, models_path=None, 
                 model_path=None,Training=False):
#{{{
        """
        Initialize the model. We either provide the parameters and a path where
        we store the models, or the location of a trained model.
        """
        if Training: 
#{{{
            assert parameters and models_path 
            # Create a name based on the parameters
            self.parameters = parameters
            self.name = get_name(parameters)
            # Model location 
            if model_path is None:
                model_path = os.path.join(models_path, self.name)
            self.model_path = model_path
            self.parameters_path = os.path.join(model_path, 'parameters.pkl')
            self.mappings_path = os.path.join(model_path, 'mappings.pkl')
            # Create directory for the model if it does not exist
            if not os.path.exists(self.model_path):
                os.makedirs(self.model_path)
            # Save the parameters to disk
            with open(self.parameters_path, 'wb') as f:
                cPickle.dump(parameters, f) 
#}}}
        else: 
#{{{
            # Model location
            self.model_path = model_path
            self.parameters_path = os.path.join(model_path, 'parameters.pkl')
            self.mappings_path = os.path.join(model_path, 'mappings.pkl')
            # Create directory for the model if it does not exist
            if not os.path.exists(self.model_path):
                os.makedirs(self.model_path)
            # Load the parameters from disk
            with open(self.parameters_path, 'rb') as f:
                self.parameters=cPickle.load(f);
            self.reload_mappings();
        self.components = {}
#}}}
#}}} 
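Stripped of the model-specific details, the directory handling here is: derive a per-model directory, create it if it does not exist, and pickle the parameters inside it (or load them back). A minimal sketch of that layout; the function names are illustrative, not the project's API.

import os
import pickle

def save_parameters(models_path, name, parameters):
    # One directory per model, created on demand, with the parameters
    # pickled inside it.
    model_path = os.path.join(models_path, name)
    if not os.path.exists(model_path):
        os.makedirs(model_path)
    with open(os.path.join(model_path, 'parameters.pkl'), 'wb') as f:
        pickle.dump(parameters, f)
    return model_path

def load_parameters(model_path):
    # Read back the parameters saved by save_parameters().
    with open(os.path.join(model_path, 'parameters.pkl'), 'rb') as f:
        return pickle.load(f)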
Example 78
Project: wechat-alfred-workflow   Author: TKkk-iOSer   File: notify.py    MIT License 4 votes vote down vote up
def png_to_icns(png_path, icns_path):
    """Convert PNG file to ICNS using ``iconutil``.

    Create an iconset from the source PNG file. Generate PNG files
    in each size required by macOS, then call ``iconutil`` to turn
    them into a single ICNS file.

    Args:
        png_path (str): Path to source PNG file.
        icns_path (str): Path to destination ICNS file.

    Raises:
        RuntimeError: Raised if ``iconutil`` or ``sips`` fail.
    """
    tempdir = tempfile.mkdtemp(prefix='aw-', dir=wf().datadir)

    try:
        iconset = os.path.join(tempdir, 'Icon.iconset')

        assert not os.path.exists(iconset), \
            'iconset already exists: ' + iconset
        os.makedirs(iconset)

        # Copy source icon to icon set and generate all the other
        # sizes needed
        configs = []
        for i in (16, 32, 128, 256, 512):
            configs.append(('icon_{0}x{0}.png'.format(i), i))
            configs.append((('icon_{0}x{0}@2x.png'.format(i), i * 2)))

        shutil.copy(png_path, os.path.join(iconset, 'icon_256x256.png'))
        shutil.copy(png_path, os.path.join(iconset, 'icon_128x128@2x.png'))

        for name, size in configs:
            outpath = os.path.join(iconset, name)
            if os.path.exists(outpath):
                continue
            convert_image(png_path, outpath, size)

        cmd = [
            b'iconutil',
            b'-c', b'icns',
            b'-o', icns_path,
            iconset]

        retcode = subprocess.call(cmd)
        if retcode != 0:
            raise RuntimeError('iconutil exited with %d' % retcode)

        assert os.path.exists(icns_path), \
            'generated ICNS file not found: ' + repr(icns_path)
    finally:
        try:
            shutil.rmtree(tempdir)
        except OSError:  # pragma: no cover
            pass 
Example 79
Project: slidoc   Author: mitotic   File: pptx2md.py    BSD 3-Clause "New" or "Revised" License 4 votes vote down vote up
def copy_image(self, image_num, img_data, img_params='', img_path='', img_ext='', img_name=''):
        # Return image reference
        if not img_ext and img_path:
            img_ext = os.path.splitext(os.path.basename(img_path))[1][1:].lower()

        if not self.args_dict.get('img_dir'):
            # Embed images within Markdown
            if img_name:
                key = img_name
            else:
                key = 'img%02d' % (len(self.image_defs)+1)
            if key in self.image_keys:
                raise Exception('Duplicate image file name: '+key)
            self.image_keys.add(key)
            ctype = 'jpeg' if img_ext == 'jpg' else img_ext
            self.image_defs.append('[%s]: data:image/%s;base64,%s %s' % (key, ctype, base64.b64encode(img_data), img_params) )
            return '![image%d][%s]' % (image_num, key)

        if img_name:
            img_copy = img_name + '.' + img_ext
        else:
            prefix = self.fileprefix + '-' if self.fileprefix else ''
            if img_path:
                img_copy = prefix + os.path.basename(img_path)
            else:
                self.img_count += 1
                img_copy = prefix + ('image%02d.%s' % (self.img_count, img_ext))

        if self.img_zip:
            # Write image file to zip archive
            zprefix = os.path.splitext(os.path.basename(self.img_dir))[0]
            if zprefix:
                img_copy = zprefix + '/' + img_copy
            self.img_zip.writestr(img_copy, img_data)
        elif not self.nofile:
            # Write image file to disk
            if self.img_dir:
                img_copy = self.img_dir + '/' + img_copy
                if not os.path.exists(self.img_dir):
                    os.makedirs(self.img_dir)

            with open(img_copy, 'w') as f:
                f.write(img_data)

        return '![image%d](%s %s)' % (image_num, img_copy, img_params) 
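The on-disk branch of copy_image() creates img_dir lazily before writing the image bytes. A minimal sketch of just that branch; note that on Python 3 the file must be opened in binary mode (the original is Python 2 code and uses 'w'). The helper name and paths are illustrative.

import os

def write_image(img_dir, file_name, img_data):
    # Create the output directory on first use, then write the bytes.
    if img_dir and not os.path.exists(img_dir):
        os.makedirs(img_dir)
    out_path = os.path.join(img_dir, file_name)
    with open(out_path, 'wb') as f:
        f.write(img_data)
    return out_path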
Example 80
Project: ieml   Author: IEMLdev   File: migrate_v03Tov04.py    GNU General Public License v3.0 -295 votes vote down vote up
def migrate(database, out_folder):
    descriptors = database.descriptors()
    dictionary = database.dictionary_structure()
    # 'root', 'paradigms', 'inhibitions'

    shutil.rmtree(out_folder + '/descriptors')
    shutil.rmtree(out_folder + '/structure')
    # os.rmdir(out_folder)

    # os.mkdir(out_folder)

    db2 = IEMLDatabase(out_folder)
    # db2.get_csv()

    if not os.path.isdir(out_folder):
        os.mkdir(out_folder)

    for ieml, (paradigms, inhibitions) in tqdm.tqdm(dictionary.structure.iterrows(), 'migrating structure'):
        l = IEMLParser().parse(ieml, factorize_script=True)

        db2.add_structure(str(l), 'is_root', True)
        for i in inhibitions:
            db2.add_structure(str(l), 'inhibition', i)

    all_db = defaultdict(lambda : defaultdict(dict))

    for (ieml, lang, desc), (v) in descriptors:
        all_db[ieml][(lang,desc)] = v.values[0]

    for ieml, dd in tqdm.tqdm(all_db.items(), 'migrating descriptors'):
        l = IEMLParser().parse(ieml, factorize_script=True)

        path = db2.path_of(l)

        os.makedirs('/'.join(path.split('/')[:-1]), exist_ok=True)


        with open(path, 'w') as fp:
            for (lang, desc), v in dd.items():
                for vv in v:
                    fp.write('"{}" {} {} "{}"\n'.format(str(l), lang, desc, db2.escape_value(vv)))


            # fp.write(json.dumps({'ieml': str(l), **dd}, indent=True))
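The makedirs call in migrate() creates only the parent directories of each descriptor file, using exist_ok=True so repeated runs do not fail. A small sketch of the same pattern, with os.path.dirname() in place of the '/'.join(path.split('/')[:-1]) idiom used above; the helper name is mine.

import os

def write_text(path, text):
    # Create the parent directories of a file path (not the path itself)
    # before writing; exist_ok=True makes the call idempotent.
    parent = os.path.dirname(path)
    if parent:
        os.makedirs(parent, exist_ok=True)
    with open(path, 'w') as fp:
        fp.write(text)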