Python tempfile.NamedTemporaryFile() Examples

The following are 30 code examples of tempfile.NamedTemporaryFile(), drawn from open-source projects. You can go to the original project or source file by following the link above each example. You may also want to check out all available functions and classes of the tempfile module.
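As a quick orientation before the project examples, here is a minimal, self-contained sketch (not taken from any of the projects below) of the two patterns that recur throughout this page: using the temporary file directly as a context manager, and creating it with delete=False so that only its name is handed to other code.

import os
import tempfile

# Pattern 1: work with the file object itself; it is deleted on close.
with tempfile.NamedTemporaryFile(mode='w+', suffix='.txt') as tmp:
    tmp.write('hello')
    tmp.flush()        # make sure the data is on disk
    tmp.seek(0)
    print(tmp.read())  # -> hello

# Pattern 2: keep the file after closing (delete=False) and pass its
# name to other code; the caller is then responsible for cleanup.
tmp = tempfile.NamedTemporaryFile(mode='w', suffix='.conf', delete=False)
try:
    tmp.write('key = value')
    tmp.close()
    with open(tmp.name) as f:
        print(f.read())
finally:
    os.unlink(tmp.name)  # remove the file tempfile was told not to delete

Note that on Windows a named temporary file cannot be opened a second time by name while it is still open, which is why several examples below close the file first and only reuse its .name attribute.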
Example #1
Source File: GXManufacturerCollection.py    From Gurux.DLMS.Python with GNU General Public License v2.0
def isUpdatesAvailable(cls, path):
        if sys.version_info < (3, 0):
            return False
        # pylint: disable=broad-except
        if not os.path.isfile(os.path.join(path, "files.xml")):
            return True
        try:
            available = dict()
            for it in ET.parse(os.path.join(path, "files.xml")).iter():
                if it.tag == "File":
                    available[it.text] = datetime.datetime.strptime(it.attrib["Modified"], "%d-%m-%Y")

            path = NamedTemporaryFile()
            path.close()
            urllib.request.urlretrieve("https://www.gurux.fi/obis/files.xml", path.name)
            for it in ET.parse(path.name).iter():
                if it.tag == "File":
                    tmp = datetime.datetime.strptime(it.attrib["Modified"], "%d-%m-%Y")
                    if it.text not in available or available[it.text] != tmp:
                        return True
        except Exception as e:
            print(e)
            return True
        return False 
Example #2
Source File: setup.py    From PyOptiX with MIT License
def save_pyoptix_conf(nvcc_path, compile_args, include_dirs, library_dirs, libraries):
    try:
        config = ConfigParser()
        config.add_section('pyoptix')

        config.set('pyoptix', 'nvcc_path', nvcc_path)
        config.set('pyoptix', 'compile_args', os.pathsep.join(compile_args))
        config.set('pyoptix', 'include_dirs', os.pathsep.join(include_dirs))
        config.set('pyoptix', 'library_dirs', os.pathsep.join(library_dirs))
        config.set('pyoptix', 'libraries', os.pathsep.join(libraries))

        tmp = NamedTemporaryFile(mode='w+', delete=False)
        config.write(tmp)
        tmp.close()
        config_path = os.path.join(os.path.dirname(sys.executable), 'pyoptix.conf')
        check_call_sudo_if_fails(['cp', tmp.name, config_path])
        check_call_sudo_if_fails(['cp', tmp.name, '/etc/pyoptix.conf'])
        check_call_sudo_if_fails(['chmod', '644', config_path])
        check_call_sudo_if_fails(['chmod', '644', '/etc/pyoptix.conf'])
    except Exception as e:
        print("PyOptiX configuration could not be saved. When you use pyoptix.Compiler, "
              "nvcc path must be in PATH, OptiX library paths must be in LD_LIBRARY_PATH, and pyoptix.Compiler "
              "attributes should be set manually.") 
Example #3
Source File: processing.py    From incubator-spot with Apache License 2.0
def convert(netflow, tmpdir, opts='', prefix=None):
    '''
        Convert `nfcapd` file to a comma-separated output format.

    :param netflow : Path of binary file.
    :param tmpdir  : Path of local staging area.
    :param opts    : A set of options for `nfdump` command.
    :param prefix  : If `prefix` is specified, the file name will begin with that;
                     otherwise, a default `prefix` is used.
    :returns       : Path of CSV-converted file.
    :rtype         : ``str``
    :raises OSError: If an error occurs while executing the `nfdump` command.
    '''
    logger = logging.getLogger('SPOT.INGEST.FLOW.PROCESS')

    with tempfile.NamedTemporaryFile(prefix=prefix, dir=tmpdir, delete=False) as fp:
        command = COMMAND.format(netflow, opts, fp.name)

        logger.debug('Execute command: {0}'.format(command))
        Util.popen(command, raises=True)

        return fp.name 
Example #4
Source File: model.py    From models with MIT License
def predict_on_batch(self, inputs):
        # write test fasta file
        temp_input = tempfile.NamedTemporaryFile(suffix = ".txt")
        test_fname = temp_input.name
        encode_sequence_into_fasta_file(ofname = test_fname, seq = inputs.tolist())
        # test gkmsvm
        temp_ofp = tempfile.NamedTemporaryFile(suffix = ".txt")
        threads_option = '-T %s' % (str(self.threads))
        verbosity_option = '-v 0'
        command = ' '.join(['gkmpredict',
                            test_fname,
                            self.model_file,
                            temp_ofp.name,
                            threads_option,
                            verbosity_option])
        #process = subprocess.Popen(command, shell=True)
        #process.wait()  # wait for it to finish
        exit_code = os.system(command)
        temp_input.close()
        assert exit_code == 0
        # get classification results
        temp_ofp.seek(0)
        y = np.array([line.split()[-1] for line in temp_ofp], dtype=float)
        temp_ofp.close()
        return np.expand_dims(y, 1) 
Example #5
Source File: processing.py    From incubator-spot with Apache License 2.0
def convert(logfile, tmpdir, opts='', prefix=None):
    '''
        Copy log file to the local staging area.

    :param logfile: Path of log file.
    :param tmpdir : Path of local staging area.
    :param opts   : A set of options for the `cp` command.
    :param prefix : If `prefix` is specified, the file name will begin with that;
                     otherwise, a default `prefix` is used.
    :returns      : Path of log file in local staging area.
    :rtype        : ``str``
    '''
    logger = logging.getLogger('SPOT.INGEST.PROXY.PROCESS')

    with tempfile.NamedTemporaryFile(prefix=prefix, dir=tmpdir, delete=False) as fp:
        command = COMMAND.format(opts, logfile, fp.name)

        logger.debug('Execute command: {0}'.format(command))
        Util.popen(command, raises=True)

        return fp.name 
Example #6
Source File: processing.py    From incubator-spot with Apache License 2.0
def convert(pcap, tmpdir, opts='', prefix=None):
    '''
        Convert `pcap` file to a comma-separated output format.

    :param pcap    : Path of binary file.
    :param tmpdir  : Path of local staging area.
    :param opts    : A set of options for `tshark` command.
    :param prefix  : If `prefix` is specified, the file name will begin with that;
                     otherwise, a default `prefix` is used.
    :returns       : Path of CSV-converted file.
    :rtype         : ``str``
    :raises OSError: If an error occurs while executing the `tshark` command.
    '''
    logger = logging.getLogger('SPOT.INGEST.DNS.PROCESS')

    with tempfile.NamedTemporaryFile(prefix=prefix, dir=tmpdir, delete=False) as fp:
        command = COMMAND.format(pcap, opts, fp.name)

        logger.debug('Execute command: {0}'.format(command))
        Util.popen(command, raises=True)

        return fp.name 
Example #7
Source File: upgrade_model_version.py    From mmdetection with Apache License 2.0
def parse_config(config_strings):
    temp_file = tempfile.NamedTemporaryFile()
    config_path = f'{temp_file.name}.py'
    with open(config_path, 'w') as f:
        f.write(config_strings)

    config = Config.fromfile(config_path)
    is_two_stage = True
    is_ssd = False
    is_retina = False
    reg_cls_agnostic = False
    if 'rpn_head' not in config.model:
        is_two_stage = False
        # check whether it is SSD
        if config.model.bbox_head.type == 'SSDHead':
            is_ssd = True
        elif config.model.bbox_head.type == 'RetinaHead':
            is_retina = True
    elif isinstance(config.model['bbox_head'], list):
        reg_cls_agnostic = True
    elif 'reg_class_agnostic' in config.model.bbox_head:
        reg_cls_agnostic = config.model.bbox_head \
            .reg_class_agnostic
    temp_file.close()
    return is_two_stage, is_ssd, is_retina, reg_cls_agnostic 
Example #8
Source File: algorithms.py    From neural-style-docker with MIT License
def gatys(content, style, outfile, size, weight, stylescale, algparams):
    """Runs Gatys et al style-transfer algorithm

    References:
        * https://arxiv.org/abs/1508.06576
        * https://github.com/jcjohnson/neural-style
    """
    # Gatys can only process one combination of content, style, weight and scale at a time, so we need to iterate
    tmpout = NamedTemporaryFile(suffix=".png")
    runalgorithm("gatys", [
        "-content_image", content,
        "-style_image", style,
        "-style_weight", weight * 100,  # Because content weight is 100
        "-style_scale", stylescale,
        "-output_image", tmpout.name,
        "-image_size", size if size is not None else shape(content)[0],
        *algparams
    ])
    # Transform to original file format
    convert(tmpout.name, outfile)
    tmpout.close() 
Example #9
Source File: test_hostconfig.py    From Paradrop with Apache License 2.0
def test_prepareHostConfig(settings, detectSystemDevices):
    """
    Test paradrop.core.config.hostconfig.prepareHostConfig
    """
    from paradrop.core.config.hostconfig import prepareHostConfig

    devices = {
        'wan': [{'name': 'eth0'}],
        'lan': list(),
        'wifi': list()
    }
    detectSystemDevices.return_value = devices

    source = tempfile.NamedTemporaryFile(delete=True)
    source.write("{test: value}")
    source.flush()

    settings.HOST_CONFIG_FILE = source.name
    settings.DEFAULT_LAN_ADDRESS = "1.2.3.4"
    settings.DEFAULT_LAN_NETWORK = "1.0.0.0/24"

    config = prepareHostConfig()
    assert config['test'] == 'value' 
Example #10
Source File: download.py    From glazier with Apache License 2.0
def DownloadFileTemp(self, url, max_retries=5, show_progress=False):
    """Downloads a file to temporary storage.

    Args:
      url:  The address of the file to be downloaded.
      max_retries:  The number of times to attempt to download
        a file if the first attempt fails.
      show_progress: Print download progress to stdout (overrides default).

    Returns:
      A string containing a path to the temporary file.
    """
    destination = tempfile.NamedTemporaryFile()
    self._save_location = destination.name
    destination.close()
    if self._beyondcorp.CheckBeyondCorp():
      url = self._SetUrl(url)
      max_retries = -1
    file_stream = self._OpenStream(url, max_retries)
    self._StreamToDisk(file_stream, show_progress)
    return self._save_location 
Example #11
Source File: translate.py    From flores with Creative Commons Attribution Share Alike 4.0 International
def translate_files_slurm(args, cmds, expected_output_files):
    conda_env = '/private/home/pipibjc/.conda/envs/fairseq-20190509'
    for cmd in cmds:
        with TempFile('w') as script:
            sh = f"""#!/bin/bash
            source activate {conda_env}
            {cmd}
            """
            print(sh)
            script.write(sh)
            script.flush()
            cmd = f"sbatch --gres=gpu:1 -c {args.cpu + 2} {args.sbatch_args} --time=15:0:0 {script.name}"
            import sys
            print(cmd, file=sys.stderr)
            check_call(cmd, shell=True)

    # wait until all expected outputs have finished
    num_finished = 0
    while num_finished < len(expected_output_files):
        num_finished = 0
        for output_file in expected_output_files:
            num_finished += 1 if check_finished(output_file) else 0
        if num_finished < len(expected_output_files):
            time.sleep(3 * 60)
            print("sleeping for 3m ...") 
Example #12
Source File: tokenization_test.py    From BERT-Classification-Tutorial with Apache License 2.0
def test_full_tokenizer(self):
        vocab_tokens = [
            "[UNK]", "[CLS]", "[SEP]", "want", "##want", "##ed", "wa", "un", "runn",
            "##ing", ","
        ]
        with tempfile.NamedTemporaryFile(delete=False) as vocab_writer:
            vocab_writer.write("".join([x + "\n" for x in vocab_tokens]))

            vocab_file = vocab_writer.name

        tokenizer = tokenization.FullTokenizer(vocab_file)
        os.unlink(vocab_file)

        tokens = tokenizer.tokenize(u"UNwant\u00E9d,running")
        self.assertAllEqual(tokens, ["un", "##want", "##ed", ",", "runn", "##ing"])

        self.assertAllEqual(
            tokenizer.convert_tokens_to_ids(tokens), [7, 4, 5, 10, 8, 9]) 
Example #13
Source File: gnupg.py    From Authenticator with GNU General Public License v2.0
def __on_apply(self, *__):
        from ...models import BackupJSON
        try:
            paraphrase = self.paraphrase_widget.entry.get_text()
            if not paraphrase:
                paraphrase = " "
            output_file = path.join(GLib.get_user_cache_dir(),
                                    path.basename(NamedTemporaryFile().name))
            status = GPG.get_default().decrypt_json(self._filename, paraphrase, output_file)
            if status.ok:
                BackupJSON.import_file(output_file)
                self.destroy()
            else:
                self.__send_notification(_("There was an error during the import of the encrypted file."))

        except AttributeError:
            Logger.error("[GPG] Invalid JSON file.") 
Example #14
Source File: file.py    From gnocchi with Apache License 2.0
def _store_new_measures(self, metric_id, data):
        tmpfile = tempfile.NamedTemporaryFile(
            prefix='gnocchi', dir=self.basepath_tmp,
            delete=False)
        tmpfile.write(data)
        tmpfile.close()
        path = self._build_measure_path(metric_id, True)
        while True:
            try:
                os.rename(tmpfile.name, path)
                break
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise
                try:
                    os.mkdir(self._build_measure_path(metric_id))
                except OSError as e:
                    # NOTE(jd) It's possible that another process created the
                    # path just before us! In this case, good for us, let's do
                    # nothing then! (see bug #1475684)
                    if e.errno != errno.EEXIST:
                        raise 
Example #15
Source File: test_diis.py    From pyscf with Apache License 2.0
def test_diis_restart(self):
        mol = gto.M(
            verbose = 7,
            output = '/dev/null',
            atom = '''
        O     0    0        0
        H     0    -1.757   1.587
        H     0    1.757    1.587''',
            basis = '631g',
        )
        tmpf = tempfile.NamedTemporaryFile()
        mf = scf.RHF(mol)
        mf.diis_file = tmpf.name
        eref = mf.kernel()
        self.assertAlmostEqual(eref, -75.44606939063496, 9)

        mf = scf.RHF(mol)
        mf.diis = scf.diis.DIIS().restore(tmpf.name)
        mf.max_cycle = 3
        e = mf.kernel()
        self.assertAlmostEqual(e, eref, 9) 
Example #16
Source File: uintermediates_slow.py    From pyscf with Apache License 2.0
def cc_Wvvvv(t1,t2,eris):
    tau = make_tau(t2,t1,t1)
    #eris_vovv = np.array(eris.ovvv).transpose(1,0,3,2)
    #tmp = einsum('mb,amef->abef',t1,eris_vovv)
    #Wabef = eris.vvvv - tmp + tmp.transpose(1,0,2,3)
    #Wabef += 0.25*einsum('mnab,mnef->abef',tau,eris.oovv)
    if t1.dtype == np.complex: ds_type = 'c16'
    else: ds_type = 'f8'
    _tmpfile1 = tempfile.NamedTemporaryFile(dir=lib.param.TMPDIR)
    fimd = h5py.File(_tmpfile1.name)
    nocc, nvir = t1.shape
    Wabef = fimd.create_dataset('vvvv', (nvir,nvir,nvir,nvir), ds_type)
    for a in range(nvir):
        Wabef[a] = eris.vvvv[a] 
        Wabef[a] -= einsum('mb,mfe->bef',t1,eris.ovvv[:,a,:,:]) 
        Wabef[a] += einsum('m,mbfe->bef',t1[:,a],eris.ovvv) 
        Wabef[a] += 0.25*einsum('mnb,mnef->bef',tau[:,:,a,:],eris.oovv)
    return Wabef 
Example #17
Source File: uintermediates_slow.py    From pyscf with Apache License 2.0
def Wvvvv(t1,t2,eris):
    tau = make_tau(t2,t1,t1)
    #Wabef = cc_Wvvvv(t1,t2,eris) + 0.25*einsum('mnab,mnef->abef',tau,eris.oovv)
    if t1.dtype == np.complex: ds_type = 'c16'
    else: ds_type = 'f8'
    _tmpfile1 = tempfile.NamedTemporaryFile(dir=lib.param.TMPDIR)
    fimd = h5py.File(_tmpfile1.name)
    nocc, nvir = t1.shape
    Wabef = fimd.create_dataset('vvvv', (nvir,nvir,nvir,nvir), ds_type)
    #_cc_Wvvvv = cc_Wvvvv(t1,t2,eris)
    for a in range(nvir):
        #Wabef[a] = _cc_Wvvvv[a]
        Wabef[a] = eris.vvvv[a] 
        Wabef[a] -= einsum('mb,mfe->bef',t1,eris.ovvv[:,a,:,:]) 
        Wabef[a] += einsum('m,mbfe->bef',t1[:,a],eris.ovvv) 
        #Wabef[a] += 0.25*einsum('mnb,mnef->bef',tau[:,:,a,:],eris.oovv)

        #Wabef[a] += 0.25*einsum('mnb,mnef->bef',tau[:,:,a,:],eris.oovv) 
        Wabef[a] += 0.5*einsum('mnb,mnef->bef',tau[:,:,a,:],eris.oovv) 
    return Wabef 
Example #18
Source File: test_mole.py    From pyscf with Apache License 2.0
def test_tofile(self):
        tmpfile = tempfile.NamedTemporaryFile()
        mol = gto.M(atom=[[1  , (0.,1.,1.)],
                          ["O1", (0.,0.,0.)],
                          [1  , (1.,1.,0.)], ])
        out1 = mol.tofile(tmpfile.name, format='xyz')
        ref = '''3
XYZ from PySCF
H           0.00000        1.00000        1.00000
O           0.00000        0.00000        0.00000
H           1.00000        1.00000        0.00000
'''
        with open(tmpfile.name, 'r') as f:
            self.assertEqual(f.read(), ref)
        self.assertEqual(out1, ref[:-1])

        tmpfile = tempfile.NamedTemporaryFile(suffix='.zmat')
        str1 = mol.tofile(tmpfile.name, format='zmat')
        #FIXME:self.assertEqual(mol._atom, mol.fromfile(tmpfile.name)) 
Example #19
Source File: mdf.py    From pyscf with Apache License 2.0
def __init__(self, cell, kpts=numpy.zeros((1,3))):
        self.cell = cell
        self.stdout = cell.stdout
        self.verbose = cell.verbose
        self.max_memory = cell.max_memory

        self.kpts = kpts  # default is gamma point
        self.kpts_band = None
        self._auxbasis = None
        self.mesh = _mesh_for_valence(cell)

        # In MDF, fitting PWs (self.mesh), and parameters eta and exp_to_discard
        # are related to each other. The compensated function does not need to
        # be very smooth. It just needs to be expanded by the specified PWs
        # (self.mesh). self.eta is estimated on the fly based on the value of
        # self.mesh.
        self.eta = None

        # Any functions which are more diffused than the compensated Gaussian
        # are linearly dependent to the PWs. They can be removed from the
        # auxiliary set without affecting the accuracy of MDF. exp_to_discard
        # can be set to the value of self.eta
        self.exp_to_discard = None

        # The following attributes are not input options.
        self.exxdiv = None  # to mimic KRHF/KUHF object in function get_coulG
        self.auxcell = None
        self.blockdim = getattr(__config__, 'df_df_DF_blockdim', 240)
        self.linear_dep_threshold = df.LINEAR_DEP_THR
        self._j_only = False
# If _cderi_to_save is specified, the 3C-integral tensor will be saved in this file.
        self._cderi_to_save = tempfile.NamedTemporaryFile(dir=lib.param.TMPDIR)
# If _cderi is specified, the 3C-integral tensor will be read from this file
        self._cderi = None
        self._rsh_df = {}  # Range separated Coulomb DF objects
        self._keys = set(self.__dict__.keys()) 
Example #20
Source File: test_auth.py    From jumpserver-python-sdk with GNU General Public License v2.0
def test_load_from_f(self):
        with tempfile.NamedTemporaryFile('w+t') as f:
            f.write(self.access_key_val)
            f.flush()
            access_key = AccessKey()
            access_key.load_from_f(f.name)
            self.assertEqual(access_key, self.access_key) 
Example #21
Source File: file.py    From gnocchi with Apache License 2.0
def _atomic_file_store(self, dest, data):
        tmpfile = tempfile.NamedTemporaryFile(
            prefix='gnocchi', dir=self.basepath_tmp,
            delete=False)
        tmpfile.write(data)
        tmpfile.close()
        os.rename(tmpfile.name, dest) 
Example #22
Source File: temp_dir.py    From gcp-variant-transforms with Apache License 2.0
def create_temp_file(
      self, suffix='', lines=None,
      compression_type=filesystem.CompressionTypes.UNCOMPRESSED):
    """Creates a temporary file in the temporary directory.

    Args:
      suffix (str): The filename suffix of the temporary file (e.g. '.txt')
      lines (List[str]): A list of lines that will be written to the temporary
        file.
      compression_type (str): Specifies compression type of the file. Value
        should be one of ``CompressionTypes``.
    Returns:
      The name of the temporary file created.
    Raises:
      ValueError: If ``compression_type`` is unsupported.
    """
    f = tempfile.NamedTemporaryFile(delete=False,
                                    dir=self._tempdir,
                                    suffix=suffix)
    if not lines:
      return f.name
    if compression_type in (filesystem.CompressionTypes.UNCOMPRESSED,
                            filesystem.CompressionTypes.AUTO):
      f.write(''.join(lines))
    elif compression_type == filesystem.CompressionTypes.GZIP:
      with gzip.GzipFile(f.name, 'w') as gzip_file:
        gzip_file.write(''.join(lines))
    elif compression_type == filesystem.CompressionTypes.BZIP2:
      with bz2.BZ2File(f.name, 'w') as bzip_file:
        bzip_file.write(''.join(lines))
    else:
      raise ValueError('Unsupported CompressionType.')

    return f.name 
Example #23
Source File: vcfio_test.py    From gcp-variant-transforms with Apache License 2.0
def setUp(self):
    super(VcfSinkTest, self).setUp()
    self.path = tempfile.NamedTemporaryFile(suffix='.vcf').name
    self.variants, self.variant_lines = zip(
        (_get_sample_variant_1(), VCF_LINE_1),
        (_get_sample_variant_2(), VCF_LINE_2),
        (_get_sample_variant_3(), VCF_LINE_3),
        (_get_sample_non_variant(), GVCF_LINE)) 
Example #24
Source File: github.py    From spleeter with MIT License
def download(self, name, path):
        """ Download model denoted by the given name to disk.

        :param name: Name of the model to download.
        :param path: Path of the directory to save model into.
        """
        url = '{}/{}/{}/{}/{}.tar.gz'.format(
            self._host,
            self._repository,
            self.RELEASE_PATH,
            self._release,
            name)
        get_logger().info('Downloading model archive %s', url)
        with requests.get(url, stream=True) as response:
            response.raise_for_status()
            archive = NamedTemporaryFile(delete=False)
            try:
                with archive as stream:
                    # Note: check for chunk size parameters ?
                    for chunk in response.iter_content(chunk_size=8192):
                        if chunk:
                            stream.write(chunk)
                get_logger().info('Validating archive checksum')
                if compute_file_checksum(archive.name) != self.checksum(name):
                    raise IOError('Downloaded file is corrupted, please retry')
                get_logger().info('Extracting downloaded %s archive', name)
                with tarfile.open(name=archive.name) as tar:
                    tar.extractall(path=path)
            finally:
                os.unlink(archive.name)
        get_logger().info('%s model file(s) extracted', name) 
Example #25
Source File: setup.py    From scarlet with MIT License
def has_flag(compiler, flagname):
    """Return a boolean indicating whether a flag name is supported on
    the specified compiler.
    """
    import tempfile

    with tempfile.NamedTemporaryFile("w", suffix=".cpp") as f:
        f.write("int main (int argc, char **argv) { return 0; }")
        try:
            compiler.compile([f.name], extra_postargs=[flagname])
        except setuptools.distutils.errors.CompileError:
            return False
    return True 
Example #26
Source File: df.py    From pyscf with Apache License 2.0
def reset(self, cell=None):
        if cell is not None:
            self.cell = cell
        self.auxcell = None
        self._cderi = None
        self._cderi_to_save = tempfile.NamedTemporaryFile(dir=lib.param.TMPDIR)
        self._rsh_df = {}
        return self 
Example #27
Source File: test_outcore.py    From pyscf with Apache License 2.0
def test_aux_e1(self):
        tmpfile = tempfile.NamedTemporaryFile(dir=lib.param.TMPDIR)
        numpy.random.seed(1)
        kptij_lst = numpy.random.random((3,2,3))
        kptij_lst[0] = 0
        outcore.aux_e1(cell, cell, tmpfile.name, aosym='s2', comp=1,
                       kptij_lst=kptij_lst, verbose=0)
        refk = incore.aux_e2(cell, cell, aosym='s2', kptij_lst=kptij_lst)
        with h5py.File(tmpfile.name, 'r') as f:
            nao = cell.nao_nr()
            idx = numpy.tril_indices(nao)
            idx = idx[0] * nao + idx[1]
            self.assertTrue(numpy.allclose(refk[0,idx], f['eri_mo/0'].value.T))
            self.assertTrue(numpy.allclose(refk[1], f['eri_mo/1'].value.T))
            self.assertTrue(numpy.allclose(refk[2], f['eri_mo/2'].value.T)) 
Example #28
Source File: hf.py    From pyscf with Apache License 2.0
def __init__(self, mol):
        if not mol._built:
            sys.stderr.write('Warning: %s must be initialized before calling SCF.\n'
                             'Initialize %s in %s\n' % (mol, mol, self))
            mol.build()
        self.mol = mol
        self.verbose = mol.verbose
        self.max_memory = mol.max_memory
        self.stdout = mol.stdout

# If chkfile is muted, SCF intermediates will not be dumped anywhere.
        if MUTE_CHKFILE:
            self.chkfile = None
        else:
# the chkfile will be removed automatically, to save the chkfile, assign a
# filename to self.chkfile
            self._chkfile = tempfile.NamedTemporaryFile(dir=lib.param.TMPDIR)
            self.chkfile = self._chkfile.name

##################################################
# don't modify the following attributes, they are not input options
        self.mo_energy = None
        self.mo_coeff = None
        self.mo_occ = None
        self.e_tot = 0
        self.converged = False
        self.callback = None
        self.scf_summary = {}

        self.opt = None
        self._eri = None # Note: self._eri requires large amount of memory

        keys = set(('conv_tol', 'conv_tol_grad', 'max_cycle', 'init_guess',
                    'DIIS', 'diis', 'diis_space', 'diis_start_cycle',
                    'diis_file', 'diis_space_rollback', 'damp', 'level_shift',
                    'direct_scf', 'direct_scf_tol', 'conv_check'))
        self._keys = set(self.__dict__.keys()).union(keys) 
Example #29
Source File: test_h2o.py    From pyscf with Apache License 2.0
def test_init_guess_atom(self):
        dm = scf.hf.init_guess_by_atom(mol)
        s = scf.hf.get_ovlp(mol)
        occ, mo = scipy.linalg.eigh(dm, s, type=2)
        ftmp = tempfile.NamedTemporaryFile(dir=lib.param.TMPDIR)
        scf.chkfile.dump_scf(mol, ftmp.name, 0, occ, mo, occ)
        self.assertAlmostEqual(numpy.linalg.norm(dm), 3.064429619915702, 8)

        mf = scf.hf.RHF(mol)
        dm0 = scf.rhf.init_guess_by_chkfile(mol, ftmp.name, project=False)
        dm1 = mf.init_guess_by_atom(mol)
        self.assertTrue(numpy.allclose(dm0, dm1))

        mf = scf.DHF(mol)
        dm0 = scf.dhf.init_guess_by_chkfile(mol, ftmp.name, project=False)
        dm1 = mf.init_guess_by_atom(mol)
        self.assertTrue(numpy.allclose(dm0, dm1))

        pmol = gto.M(atom=mol.atom, basis='ccpvdz')
        pmol.cart = True
        dm = scf.hf.init_guess_by_atom(pmol)
        self.assertAlmostEqual(numpy.linalg.norm(dm), 2.923422868807739, 8)

        pmol = gto.M(atom='ghost-O 0 0 0; H 0 0 0.5; H 0 0.5 0', basis='ccpvdz')
        dm = scf.hf.init_guess_by_atom(pmol)
        self.assertAlmostEqual(numpy.linalg.norm(dm), 0.86450726178750226, 8) 
Example #30
Source File: dwdownloader.py    From daily-wallpaper with MIT License
def download(url):
    if url.startswith('file://'):
        filename = url[7:]
        if os.path.exists(filename):
            if os.path.exists(comun.POTD):
                md5_old = md5(comun.POTD)
                md5_new = md5(filename)
                if md5_old == md5_new:
                    return False
            shutil.copy(filename, comun.POTD)
            return True
        return False
    try:
        r = requests.get(url, stream=True)
        if r.status_code == 200:
            if os.path.exists(comun.POTD):
                md5_old = md5(comun.POTD)
                tempfilename = tempfile.NamedTemporaryFile().name
                with open(tempfilename, 'wb') as f:
                    for chunk in r.iter_content(1024):
                        f.write(chunk)
                md5_new = md5(tempfilename)
                if md5_old == md5_new:
                    os.remove(tempfilename)
                else:
                    os.remove(comun.POTD)
                    shutil.move(tempfilename, comun.POTD)
                    return True
            else:
                with open(comun.POTD, 'wb') as f:
                    for chunk in r.iter_content(1024):
                        f.write(chunk)
                    return True
    except Exception as e:
        print(e)
    return False