Python tempfile.mkdtemp() Examples
The following are 30 code examples of tempfile.mkdtemp().
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example.
You may also want to check out all available functions/classes of the module tempfile, or try the search function.

Example #1
Source File: demo.py From svviz with MIT License | 26 votes |
def downloadDemo(which):
    """Download the named svviz example archive and unpack it into ./svviz-examples.

    Returns True on success, False if the download/extraction failed or the
    expected output directory is missing afterwards.
    """
    try:
        stagingDir = tempfile.mkdtemp()
        zipPath = "{}/svviz-data.zip".format(stagingDir)

        # logging.info("Downloading...")
        downloadWithProgress("http://svviz.github.io/svviz/assets/examples/{}.zip".format(which), zipPath)

        logging.info("Decompressing...")
        bundle = zipfile.ZipFile(zipPath)
        bundle.extractall("{}".format(stagingDir))

        if not os.path.exists("svviz-examples"):
            os.makedirs("svviz-examples/")
        shutil.move("{temp}/{which}".format(temp=stagingDir, which=which), "svviz-examples/")
    except Exception as exc:
        print("error downloading and decompressing example data: {}".format(exc))
        return False

    # Double-check the expected directory actually materialized.
    if not os.path.exists("svviz-examples"):
        print("error finding example data after download and decompression")
        return False
    return True
Example #2
Source File: export.py From svviz with MIT License | 6 votes |
def convertSVG(insvg, outformat, converter):
    """Write the SVG text *insvg* to a temp file and convert it to *outformat*
    using the requested backend ("webkittopdf", "librsvg" or "inkscape")."""
    workDir = tempfile.mkdtemp()
    svgPath = "{}/original.svg".format(workDir)
    with open(svgPath, "w") as handle:
        handle.write(insvg)

    targetPath = "{}/converted.{}".format(workDir, outformat)

    if converter == "webkittopdf":
        exportData = _convertSVG_webkitToPDF(svgPath, targetPath, outformat)
    elif converter == "librsvg":
        exportData = _convertSVG_rsvg_convert(svgPath, targetPath, outformat)
    elif converter == "inkscape":
        exportData = _convertSVG_inkscape(svgPath, targetPath, outformat)
    # NOTE(review): an unrecognized converter leaves exportData unbound and
    # raises UnboundLocalError here, matching the original behavior.
    return exportData
Example #3
Source File: insertsizes.py From svviz with MIT License | 6 votes |
def plotInsertSizeDistribution(isd, sampleName, dataHub):
    """Render an insert-size ECDF plot for *sampleName* via rpy2/R.

    Returns the PNG file contents, or None when rpy2 is unavailable.
    """
    try:
        from rpy2 import robjects as ro

        plotDir = tempfile.mkdtemp()
        pngPath = os.path.join(plotDir, sampleName)
        if not pngPath.endswith(".png"):
            pngPath += ".png"

        ro.r.png(pngPath, res=250, width=1200, height=1200)

        alleles = ["alt", "ref", "amb"]
        counts = [[len(chosenSet) for chosenSet in dataHub.samples[sampleName].chosenSets(allele)]
                  for allele in alleles]
        plotting.ecdf([isd.insertSizes] + counts, ["average"] + alleles,
                      xlab="Insert size (bp)", main=sampleName, legendWhere="bottomright", lwd=2)
        ro.r["dev.off"]()

        return open(pngPath).read()
    except ImportError:
        # rpy2 not installed -- plotting silently unavailable.
        return None
Example #4
Source File: validate_submission.py From neural-fingerprinting with BSD 3-Clause "New" or "Revised" License | 6 votes |
def main(args):
    """Validate a single submission archive and print the verdict."""
    print_in_box('Validating submission ' + args.submission_filename)
    random.seed()
    temp_dir = args.temp_dir
    cleanup_needed = False
    if not temp_dir:
        # No scratch directory supplied -- create one and remove it afterwards.
        temp_dir = tempfile.mkdtemp()
        logging.info('Created temporary directory: %s', temp_dir)
        cleanup_needed = True

    checker = validate_submission_lib.SubmissionValidator(temp_dir, args.use_gpu)
    if checker.validate_submission(args.submission_filename, args.submission_type):
        print_in_box('Submission is VALID!')
    else:
        print_in_box('Submission is INVALID, see log messages for details')

    if cleanup_needed:
        logging.info('Deleting temporary directory: %s', temp_dir)
        subprocess.call(['rm', '-rf', temp_dir])
Example #5
Source File: projectfilefolderhandle.py From CAMISIM with Apache License 2.0 | 6 votes |
def __init__(self, tmp_dir, output_dir, time_stamp=None, logfile=None, verbose=True, debug=False):
    """
    Constructor

    @param tmp_dir: Directory for temporary data
    @type tmp_dir: str | unicode
    @param output_dir: Directory where final data will be placed
    @type output_dir: str | unicode
    @param time_stamp: timestamp as string
    @type time_stamp: str | unicode
    @param logfile: file | FileIO | StringIO | basestring
    @param verbose: Not verbose means that only warnings and errors will be past to stream
    @type verbose: bool
    @param debug: Display debug messages
    @type debug: bool
    """
    assert isinstance(tmp_dir, basestring)
    assert isinstance(output_dir, basestring)
    assert time_stamp is None or isinstance(time_stamp, basestring)
    # Work inside a private subdirectory of tmp_dir so parallel runs cannot collide.
    self._tmp_dir = tempfile.mkdtemp(dir=tmp_dir)
    self._directory_output = output_dir
    if time_stamp is not None:
        self._time_stamp = time_stamp
    else:
        self._time_stamp = datetime.datetime.fromtimestamp(time.time()).strftime('%Y.%m.%d_%H.%M.%S')
    super(ProjectFileFolderHandle, self).__init__(logfile, verbose, debug)
Example #6
Source File: lambda_function_builder.py From sqs-s3-logger with Apache License 2.0 | 6 votes |
def build_package():
    """Assemble the Lambda deployment zip and return the archive's path."""
    staging = tempfile.mkdtemp(prefix='lambda_package_')
    install_packages(staging, REQUIRED_PACKAGES)
    for name in REQUIRED_FILES:
        shutil.copyfile(
            src=os.path.join(module_path, name),
            dst=os.path.join(staging, name)
        )
    # The zip itself goes into a second temp dir so it is not swept up
    # into its own contents.
    out_file = os.path.join(
        tempfile.mkdtemp(prefix='lambda_package_built'),
        'sqs_s3_logger_lambda_{}.zip'.format(datetime.datetime.now().isoformat())
    )
    LOGGER.info('Creating a function package file at {}'.format(out_file))
    archive(staging, out_file)
    return out_file
Example #7
Source File: test_config.py From Paradrop with Apache License 2.0 | 6 votes |
def test_revert_config():
    """ Test the revertConfig function """
    from paradrop.core.config import osconfig

    # Need to make a writable location for our config files.
    settings.UCI_CONFIG_DIR = tempfile.mkdtemp()
    settings.UCI_BACKUP_DIR = tempfile.mkdtemp()

    change = UpdateObject({'name': 'test'})
    change.old = None
    change.new = MagicMock()

    osconfig.revertConfig(change, "network")

    # Clean up our config dir
    pdos.remove(settings.UCI_CONFIG_DIR)
    pdos.remove(settings.UCI_BACKUP_DIR)
Example #8
Source File: generator_utils_test.py From fine-lm with MIT License | 6 votes |
def testGetOrGenerateTxtVocab(self):
    """The generated vocab file is reused on subsequent calls, not rebuilt."""
    data_dir = tempfile.mkdtemp(dir=self.get_temp_dir())
    test_file = os.path.join(self.get_temp_dir(), "test.txt")
    with tf.gfile.Open(test_file, "w") as fh:
        fh.write("a b c\n")
        fh.write("d e f\n")

    # Create a vocab over the test file.
    first_vocab = generator_utils.get_or_generate_txt_vocab(
        data_dir, "test.voc", 20, test_file)
    self.assertTrue(tf.gfile.Exists(os.path.join(data_dir, "test.voc")))
    self.assertIsNotNone(first_vocab)

    # Append a new line to the test file which would change the vocab if
    # the vocab were not being read from file.
    with tf.gfile.Open(test_file, "a") as fh:
        fh.write("g h i\n")

    second_vocab = generator_utils.get_or_generate_txt_vocab(
        data_dir, "test.voc", 20, test_file)
    self.assertTrue(tf.gfile.Exists(os.path.join(data_dir, "test.voc")))
    self.assertIsNotNone(second_vocab)

    self.assertEqual(first_vocab.dump(), second_vocab.dump())
Example #9
Source File: dotplots.py From svviz with MIT License | 5 votes |
def dotplot2(s1, s2, wordsize=5, overlap=5, verbose=1):
    """Build a dotplot comparing sequences *s1* and *s2*.

    A cell is marked (set to 0) when the *wordsize*-length words at the two
    positions match exactly, forwards or reversed; positions advance in
    steps of *overlap*.

    verbose = 0 (no progress), 1 (progress if s1 and s2 are long)
    or 2 (progress in any case)

    Returns the PNG image data as a string, or None if rendering failed.
    """
    doProgress = False
    if verbose > 1 or len(s1) * len(s2) > 1e6:
        doProgress = True

    # Floor division keeps the matrix shape/index integral: the original
    # Python 2 "/" relied on int/int truncation and breaks under Python 3,
    # where it yields floats that numpy rejects as dimensions.
    mat = numpy.ones(((len(s1) - wordsize) // overlap + 2,
                      (len(s2) - wordsize) // overlap + 2))

    for i in range(0, len(s1) - wordsize, overlap):
        if i % 1000 == 0 and doProgress:
            logging.info(" dotplot progress: {} of {} rows done".format(i, len(s1) - wordsize))
        word1 = s1[i:i + wordsize]
        for j in range(0, len(s2) - wordsize, overlap):
            word2 = s2[j:j + wordsize]
            if word1 == word2 or word1 == word2[::-1]:
                mat[i // overlap, j // overlap] = 0

    imgData = None
    tempDir = tempfile.mkdtemp()
    try:
        path = os.path.join(tempDir, "dotplot.png")
        misc.imsave(path, mat)
        imgData = open(path).read()
    except Exception as e:
        logging.error("Error generating dotplots:'{}'".format(e))
    finally:
        shutil.rmtree(tempDir)
    return imgData
Example #10
Source File: collector.py From incubator-spot with Apache License 2.0 | 5 votes |
def __init__(self, datatype, topic, skip_conversion, **conf):
    """Set up the Distributed Collector: staging dir, file watcher and worker pool."""
    self._logger = logging.getLogger('SPOT.INGEST.COLLECTOR')
    self._logger.info('Initializing Distributed Collector process...')

    self._datatype = datatype
    self._interval = conf['ingestion_interval']
    self._isalive = True
    self._process_opts = conf['pipelines'][datatype]['process_opt']
    self._processes = conf['collector_processes']
    self._producer_kwargs = conf['producer']
    self._skip_conversion = skip_conversion
    self._topic = topic

    # .............................init FileObserver
    self.FileWatcher = FileWatcher(**conf['file_watcher'])

    # .............................set up local staging area
    self._tmpdir = mkdtemp(prefix='_DC.', dir=conf['pipelines'][datatype]['local_staging'])
    self._logger.info('Use directory "{0}" as local staging area.'.format(self._tmpdir))

    # .............................define a process pool object
    self._pool = Pool(self._processes, _init_child, [self._tmpdir])
    self._logger.info('Master Collector will use {0} parallel processes.'
        .format(self._processes))

    # SIGUSR1 is the external shutdown signal for this process.
    signal.signal(signal.SIGUSR1, self.kill)
    self._logger.info('Initialization completed successfully!')
Example #11
Source File: tempfile.py From aegea with Apache License 2.0 | 5 votes |
def mkdtemp(suffix=None, prefix=None, dir=None):
    """Wrap `tempfile.mkdtemp()` to make the suffix and prefix optional (like Python 3.5)."""
    supplied = {}
    for key, value in (("suffix", suffix), ("prefix", prefix), ("dir", dir)):
        if value is not None:
            supplied[key] = value
    return old_mkdtemp(**supplied)
Example #12
Source File: tempfile.py From aegea with Apache License 2.0 | 5 votes |
def __init__(self, suffix=None, prefix=None, dir=None):
    # Create the directory eagerly, then register a finalizer so it is
    # cleaned up (with a warning) even if the caller never does so.
    self.name = mkdtemp(suffix, prefix, dir)
    self._finalizer = finalize(
        self,
        self._cleanup,
        self.name,
        warn_message="Implicitly cleaning up {!r}".format(self))
Example #13
Source File: bindiff.py From BASS with GNU General Public License v2.0 | 5 votes |
def bindiff_pickle_export(self, sample, is_64_bit = True, timeout = None):
    """
    Load a sample into IDA Pro, perform autoanalysis and export a pickle file.
    :param sample: The sample's path
    :param is_64_bit: If the sample needs to be analyzed by the 64 bit version of IDA
    :param timeout: Timeout for the analysis in seconds
    :return: A (BinExport path, pickle path) tuple; the files need to be
        deleted by the caller. Returns None on error.
    """
    data_to_send = {
        "timeout": timeout,
        "is_64_bit": is_64_bit}
    url = "%s/binexport_pickle" % next(self._urls)
    log.debug("curl -XPOST --data '%s' '%s'", json.dumps(data_to_send), url)
    response = requests.post(url, data = data_to_send, files = {os.path.basename(sample): open(sample, "rb")})
    if response.status_code == 200:
        handle_tar, path_tar = tempfile.mkstemp(suffix = ".tar.gz")
        with os.fdopen(handle_tar, "wb") as f:
            # Use an explicit loop: the original map(f.write, ...) is lazy
            # under Python 3 and would never consume iter_content(), leaving
            # the tarball empty.
            for chunk in response.iter_content(1024):
                f.write(chunk)
        directory = tempfile.mkdtemp()
        subprocess.check_call(["tar", "xf", path_tar], cwd = directory)

        handle_bindiff, output_bindiff = tempfile.mkstemp(suffix = ".BinExport")
        with os.fdopen(handle_bindiff, "wb") as f:
            with open(os.path.join(directory, "output.BinExport"), "rb") as f2:
                shutil.copyfileobj(f2, f)
        handle_pickle, output_pickle = tempfile.mkstemp(suffix = ".pickle")
        with os.fdopen(handle_pickle, "wb") as f:
            with open(os.path.join(directory, "output.pickle"), "rb") as f2:
                shutil.copyfileobj(f2, f)

        # Drop the intermediate tarball and extraction directory.
        os.unlink(path_tar)
        shutil.rmtree(directory)
        return output_bindiff, output_pickle
    else:
        log.error("Bindiff server responded with status code %d: %s", response.status_code, response.content)
        return None
Example #14
Source File: ida_service.py From BASS with GNU General Public License v2.0 | 5 votes |
def bindiff_export():
    """ Run the IDA Pro autoanalysis on the input file and export a BinExport database.
    :param input: The input file
    :return: Status code 200 and a JSON object containing the output database name in key
        'output', or status code 422 on invalid parameters, 408 on timeout or
        500 on other errors.
    """
    logger.info("bindiff_export called")
    workdir = None
    try:
        workdir = tempfile.mkdtemp()
        if len(request.files) != 1:
            return make_response(jsonify(error = "Missing file parameter"), 422)
        upload_name, upload = request.files.items()[0]
        input_ = os.path.join(workdir, sanitize_filename(upload_name))
        upload.save(input_)
        output = os.path.join(workdir, "output.BinExport")

        timeout = request.form.get('timeout', None)
        is_64_bit = request.form.get('is_64_bit', True)
        try:
            run_ida(input_, is_64_bit, timeout,
                    os.path.join(PREFIX, "export_binexport_pickle.py"),
                    "binexport", output)
            logger.info("Command completed successfully")
            return send_file(open(output, "rb"), as_attachment = True,
                             attachment_filename = "%s.BinExport" % upload_name,
                             mimetype = "application/binary")
        except TimeoutError:
            return jsonify(error = "Program execution timed out"), 408
        except OSError as err:
            return jsonify(error = "Program execution failed with error %d" % err.errno), 500
    finally:
        if workdir is not None:
            shutil.rmtree(workdir)
Example #15
Source File: ida_service.py From BASS with GNU General Public License v2.0 | 5 votes |
def bindiff_pickle_export():
    """ Run the IDA Pro autoanalysis on the input file and export both a BinExport
    database and a pickle file, bundled into a tar.gz.
    :param input: The input file
    :return: Status code 200 and the tarball, or status code 422 on invalid
        parameters, 408 on timeout or 500 on other errors.
    """
    logger.info("bindiff_pickle_export called")
    workdir = None
    try:
        workdir = tempfile.mkdtemp()
        if len(request.files) != 1:
            return make_response(jsonify(error = "Missing file parameter"), 422)
        upload_name, upload = request.files.items()[0]
        input_ = os.path.join(workdir, sanitize_filename(upload_name))
        upload.save(input_)
        output_binexport = os.path.join(workdir, "output.BinExport")
        output_pickle = os.path.join(workdir, "output.pickle")

        timeout = request.form.get('timeout', None)
        is_64_bit = request.form.get('is_64_bit', True)
        try:
            run_ida(input_, is_64_bit, timeout,
                    os.path.join(PREFIX, "export_binexport_pickle.py"),
                    "binexport_pickle", output_binexport, output_pickle)
            logger.info("Command completed successfully")
            output_tar = os.path.join(workdir, "output.tar.gz")
            # Archive with paths relative to workdir so the tarball has flat entries.
            subprocess.check_call(
                ["tar", "czf", output_tar,
                 os.path.relpath(output_binexport, workdir),
                 os.path.relpath(output_pickle, workdir)],
                cwd = workdir)
            return send_file(open(output_tar, "rb"), as_attachment = True,
                             attachment_filename = "%s.tar.gz" % upload_name,
                             mimetype = "application/gzip")
        except TimeoutError:
            return jsonify(error = "Program execution timed out"), 408
        except OSError as err:
            return jsonify(error = "Program execution failed with error %d" % err.errno), 500
    finally:
        if workdir is not None:
            shutil.rmtree(workdir)
Example #16
Source File: ida_service.py From BASS with GNU General Public License v2.0 | 5 votes |
def pickle_export():
    """ Run the IDA Pro autoanalysis on the input file and export a pickle database.
    :param input: The input file
    :return: Status code 200 and the pickle file, or status code 422 on invalid
        parameters, 408 on timeout or 500 on other errors.
    """
    # Fixed copy-paste bug: the original logged "bindiff_export called" here.
    logger.info("pickle_export called")
    directory = None
    try:
        directory = tempfile.mkdtemp()
        if len(request.files) != 1:
            return make_response(jsonify(error = "Missing file parameter"), 422)
        filename, file_ = request.files.items()[0]
        input_ = os.path.join(directory, sanitize_filename(filename))
        file_.save(input_)
        output = os.path.join(directory, "output.pickle")

        timeout = request.form.get('timeout', None)
        is_64_bit = request.form.get('is_64_bit', False)
        try:
            run_ida(input_, is_64_bit, timeout,
                    os.path.join(PREFIX, "export_binexport_pickle.py"),
                    "pickle", output)
            logger.info("Command completed successfully")
            return send_file(open(output, "rb"), as_attachment = True,
                             attachment_filename = "%s.pickle" % filename,
                             mimetype = "application/binary")
        except TimeoutError:
            return jsonify(error = "Program execution timed out"), 408
        except OSError as err:
            return jsonify(error = "Program execution failed with error %d" % err.errno), 500
    finally:
        if directory is not None:
            shutil.rmtree(directory)
Example #17
Source File: ida_service.py From BASS with GNU General Public License v2.0 | 5 votes |
def bindiff_compare():
    """Diff two uploaded BinExport files with BinDiff and return the sqlite result."""
    logger.info("bindiff_compare called")

    input_dir = tempfile.mkdtemp()
    output_dir = tempfile.mkdtemp()
    try:
        primary = os.path.join(input_dir, "primary")
        secondary = os.path.join(input_dir, "secondary")
        try:
            request.files["primary"].save(primary)
            request.files["secondary"].save(secondary)
        except KeyError:
            return make_response(jsonify(error="Missing parameter 'primary' or 'secondary'"), 422)

        timeout = request.form.get('timeout', None)
        cmd = (BINDIFF_DIFFER, "--primary", primary, "--secondary", secondary,
               "--output_dir", output_dir)
        logger.info("Executing %s", " ".join("'%s'" % x for x in cmd))
        check_call(cmd, cwd = output_dir, timeout = timeout)

        produced = [os.path.join(output_dir, x) for x in os.listdir(output_dir)]
        if len(produced) != 1:
            return make_response(jsonify(error = "BinDiff generated 0 or several output files"), 500)
        return send_file(open(produced[0], "rb"), as_attachment = True,
                         attachment_filename = "BinDiff.sqlite3",
                         mimetype = "application/binary")
    except OSError as err:
        # NOTE(review): errno == -9 presumably marks a SIGKILL from the
        # timeout wrapper -- confirm against check_call's implementation.
        if err.errno == -9:
            return make_response(jsonify(error = "Program execution timed out"), 408)
        else:
            return make_response(jsonify(error = "Program execution failed with error %d" % err.errno), 500)
    finally:
        shutil.rmtree(input_dir)
        shutil.rmtree(output_dir)
Example #18
Source File: ida_service.py From BASS with GNU General Public License v2.0 | 5 votes |
def bindiff_export():
    """ Run the IDA Pro autoanalysis on the input file and export a BinExport database.
    :param input: The input file
    :return: Status code 200 and a JSON object containing the output database name in key
        'output', or status code 422 on invalid parameters, 408 on timeout or
        500 on other errors.
    """
    logger.info("bindiff_export called")
    workdir = None
    try:
        workdir = tempfile.mkdtemp()
        if len(request.files) != 1:
            return make_response(jsonify(error = "Missing file parameter"), 422)
        upload_name, upload = request.files.items()[0]
        input_ = os.path.join(workdir, sanitize_filename(upload_name))
        upload.save(input_)
        output = os.path.join(workdir, "output.BinExport")

        timeout = request.form.get('timeout', None)
        is_64_bit = request.form.get('is_64_bit', True)
        try:
            run_ida(input_, is_64_bit, timeout,
                    os.path.join(PREFIX, "export_binexport_pickle.py"),
                    "binexport", output)
            logger.info("Command completed successfully")
            return send_file(open(output, "rb"), as_attachment = True,
                             attachment_filename = "%s.BinExport" % upload_name,
                             mimetype = "application/binary")
        except TimeoutError:
            return jsonify(error = "Program execution timed out"), 408
        except OSError as err:
            return jsonify(error = "Program execution failed with error %d" % err.errno), 500
    finally:
        if workdir is not None:
            shutil.rmtree(workdir)
Example #19
Source File: ida_service.py From BASS with GNU General Public License v2.0 | 5 votes |
def bindiff_pickle_export():
    """ Run the IDA Pro autoanalysis on the input file and export both a BinExport
    database and a pickle file, bundled into a tar.gz.
    :param input: The input file
    :return: Status code 200 and the tarball, or status code 422 on invalid
        parameters, 408 on timeout or 500 on other errors.
    """
    logger.info("bindiff_pickle_export called")
    workdir = None
    try:
        workdir = tempfile.mkdtemp()
        if len(request.files) != 1:
            return make_response(jsonify(error = "Missing file parameter"), 422)
        upload_name, upload = request.files.items()[0]
        input_ = os.path.join(workdir, sanitize_filename(upload_name))
        upload.save(input_)
        output_binexport = os.path.join(workdir, "output.BinExport")
        output_pickle = os.path.join(workdir, "output.pickle")

        timeout = request.form.get('timeout', None)
        is_64_bit = request.form.get('is_64_bit', True)
        try:
            run_ida(input_, is_64_bit, timeout,
                    os.path.join(PREFIX, "export_binexport_pickle.py"),
                    "binexport_pickle", output_binexport, output_pickle)
            logger.info("Command completed successfully")
            output_tar = os.path.join(workdir, "output.tar.gz")
            # Archive with paths relative to workdir so the tarball has flat entries.
            subprocess.check_call(
                ["tar", "czf", output_tar,
                 os.path.relpath(output_binexport, workdir),
                 os.path.relpath(output_pickle, workdir)],
                cwd = workdir)
            return send_file(open(output_tar, "rb"), as_attachment = True,
                             attachment_filename = "%s.tar.gz" % upload_name,
                             mimetype = "application/gzip")
        except TimeoutError:
            return jsonify(error = "Program execution timed out"), 408
        except OSError as err:
            return jsonify(error = "Program execution failed with error %d" % err.errno), 500
    finally:
        if workdir is not None:
            shutil.rmtree(workdir)
Example #20
Source File: ida_service.py From BASS with GNU General Public License v2.0 | 5 votes |
def pickle_export():
    """ Run the IDA Pro autoanalysis on the input file and export a pickle database.
    :param input: The input file
    :return: Status code 200 and the pickle file, or status code 422 on invalid
        parameters, 408 on timeout or 500 on other errors.
    """
    # Fixed copy-paste bug: the original logged "bindiff_export called" here.
    logger.info("pickle_export called")
    directory = None
    try:
        directory = tempfile.mkdtemp()
        if len(request.files) != 1:
            return make_response(jsonify(error = "Missing file parameter"), 422)
        filename, file_ = request.files.items()[0]
        input_ = os.path.join(directory, sanitize_filename(filename))
        file_.save(input_)
        output = os.path.join(directory, "output.pickle")

        timeout = request.form.get('timeout', None)
        is_64_bit = request.form.get('is_64_bit', False)
        try:
            run_ida(input_, is_64_bit, timeout,
                    os.path.join(PREFIX, "export_binexport_pickle.py"),
                    "pickle", output)
            logger.info("Command completed successfully")
            return send_file(open(output, "rb"), as_attachment = True,
                             attachment_filename = "%s.pickle" % filename,
                             mimetype = "application/binary")
        except TimeoutError:
            return jsonify(error = "Program execution timed out"), 408
        except OSError as err:
            return jsonify(error = "Program execution failed with error %d" % err.errno), 500
    finally:
        if directory is not None:
            shutil.rmtree(directory)
Example #21
Source File: ida_service.py From BASS with GNU General Public License v2.0 | 5 votes |
def bindiff_compare():
    """Diff two uploaded BinExport files with BinDiff and return the sqlite result."""
    logger.info("bindiff_compare called")

    input_dir = tempfile.mkdtemp()
    output_dir = tempfile.mkdtemp()
    try:
        primary = os.path.join(input_dir, "primary")
        secondary = os.path.join(input_dir, "secondary")
        try:
            request.files["primary"].save(primary)
            request.files["secondary"].save(secondary)
        except KeyError:
            return make_response(jsonify(error="Missing parameter 'primary' or 'secondary'"), 422)

        timeout = request.form.get('timeout', None)
        cmd = (BINDIFF_DIFFER, "--primary", primary, "--secondary", secondary,
               "--output_dir", output_dir)
        logger.info("Executing %s", " ".join("'%s'" % x for x in cmd))
        check_call(cmd, cwd = output_dir, timeout = timeout)

        produced = [os.path.join(output_dir, x) for x in os.listdir(output_dir)]
        if len(produced) != 1:
            return make_response(jsonify(error = "BinDiff generated 0 or several output files"), 500)
        return send_file(open(produced[0], "rb"), as_attachment = True,
                         attachment_filename = "BinDiff.sqlite3",
                         mimetype = "application/binary")
    except OSError as err:
        # NOTE(review): errno == -9 presumably marks a SIGKILL from the
        # timeout wrapper -- confirm against check_call's implementation.
        if err.errno == -9:
            return make_response(jsonify(error = "Program execution timed out"), 408)
        else:
            return make_response(jsonify(error = "Program execution failed with error %d" % err.errno), 500)
    finally:
        shutil.rmtree(input_dir)
        shutil.rmtree(output_dir)
Example #22
Source File: test_static.py From cherrypy with BSD 3-Clause "New" or "Revised" License | 5 votes |
def ensure_unicode_filesystem():
    """
    TODO: replace with simply pytest fixtures once webtest.TestCase
    no longer implies unittest.
    """
    scratch = py.path.local(tempfile.mkdtemp())
    try:
        _check_unicode_filesystem(scratch)
    finally:
        scratch.remove()
Example #23
Source File: test.py From mmdetection with Apache License 2.0 | 5 votes |
def collect_results_cpu(result_part, size, tmpdir=None):
    """Gather per-rank partial results onto rank 0 through a shared temp dir.

    Rank 0 creates the directory and broadcasts its name as a fixed-width
    uint8 tensor; every rank dumps its part there, and rank 0 reloads,
    interleaves and truncates them to *size*. Non-zero ranks return None.
    """
    rank, world_size = get_dist_info()
    # create a tmp dir if it is not specified
    if tmpdir is None:
        MAX_LEN = 512
        # 32 is whitespace
        dir_tensor = torch.full((MAX_LEN, ),
                                32,
                                dtype=torch.uint8,
                                device='cuda')
        if rank == 0:
            tmpdir = tempfile.mkdtemp()
            tmpdir = torch.tensor(
                bytearray(tmpdir.encode()), dtype=torch.uint8, device='cuda')
            dir_tensor[:len(tmpdir)] = tmpdir
        dist.broadcast(dir_tensor, 0)
        tmpdir = dir_tensor.cpu().numpy().tobytes().decode().rstrip()
    else:
        mmcv.mkdir_or_exist(tmpdir)
    # dump the part result to the dir
    mmcv.dump(result_part, osp.join(tmpdir, f'part_{rank}.pkl'))
    dist.barrier()
    # collect all parts
    if rank != 0:
        return None
    # load results of all parts from tmp dir
    parts = []
    for worker in range(world_size):
        parts.append(mmcv.load(osp.join(tmpdir, f'part_{worker}.pkl')))
    # sort the results
    merged = []
    for group in zip(*parts):
        merged.extend(list(group))
    # the dataloader may pad some samples
    merged = merged[:size]
    # remove tmp dir
    shutil.rmtree(tmpdir)
    return merged
Example #24
Source File: test_robustness.py From mmdetection with Apache License 2.0 | 5 votes |
def collect_results(result_part, size, tmpdir=None):
    """Gather per-rank partial results onto rank 0 through a shared temp dir.

    Rank 0 creates the directory and broadcasts its name as a fixed-width
    uint8 tensor; every rank dumps its part there, and rank 0 reloads,
    interleaves and truncates them to *size*. Non-zero ranks return None.
    """
    rank, world_size = get_dist_info()
    # create a tmp dir if it is not specified
    if tmpdir is None:
        MAX_LEN = 512
        # 32 is whitespace
        dir_tensor = torch.full((MAX_LEN, ),
                                32,
                                dtype=torch.uint8,
                                device='cuda')
        if rank == 0:
            tmpdir = tempfile.mkdtemp()
            tmpdir = torch.tensor(
                bytearray(tmpdir.encode()), dtype=torch.uint8, device='cuda')
            dir_tensor[:len(tmpdir)] = tmpdir
        dist.broadcast(dir_tensor, 0)
        tmpdir = dir_tensor.cpu().numpy().tobytes().decode().rstrip()
    else:
        mmcv.mkdir_or_exist(tmpdir)
    # dump the part result to the dir
    mmcv.dump(result_part, osp.join(tmpdir, f'part_{rank}.pkl'))
    dist.barrier()
    # collect all parts
    if rank != 0:
        return None
    # load results of all parts from tmp dir
    parts = []
    for worker in range(world_size):
        parts.append(mmcv.load(osp.join(tmpdir, f'part_{worker}.pkl')))
    # sort the results
    merged = []
    for group in zip(*parts):
        merged.extend(list(group))
    # the dataloader may pad some samples
    merged = merged[:size]
    # remove tmp dir
    shutil.rmtree(tmpdir)
    return merged
Example #25
Source File: validate_and_copy_submissions.py From neural-fingerprinting with BSD 3-Clause "New" or "Revised" License | 5 votes |
def main(args):
    """Validate all submissions under source_dir, optionally copying valid ones."""
    random.seed()
    temp_dir = tempfile.mkdtemp()
    logging.info('Created temporary directory: %s', temp_dir)

    runner = SubmissionValidator(
        source_dir=args.source_dir,
        target_dir=args.target_dir,
        temp_dir=temp_dir,
        do_copy=args.copy,
        use_gpu=args.use_gpu,
        containers_file=args.containers_file)
    runner.run()

    logging.info('Deleting temporary directory: %s', temp_dir)
    subprocess.call(['rm', '-rf', temp_dir])
Example #26
Source File: dataloader.py From models with MIT License | 5 votes |
def inflate_data_sources(input):
    """Extract the zip archive at path *input* into a fresh temp directory.

    Returns a dict mapping each top-level entry's stem (name before the
    first '.') to the extracted entry's absolute path.
    """
    import zipfile
    import tempfile
    import os
    # mkdtemp() already returns a brand-new, empty, private directory, so the
    # original rmtree()+makedirs() "empty it first" dance was redundant and
    # introduced a window where another process could grab the path.
    dirpath = tempfile.mkdtemp()
    # load and extract zip file
    zf = zipfile.ZipFile(input)
    zf.extractall(dirpath)
    extracted_folders = os.listdir(dirpath)
    return {k.split(".")[0]: os.path.join(dirpath, k) for k in extracted_folders}
Example #27
Source File: mgcluster.py From CAMISIM with Apache License 2.0 | 5 votes |
def __init__(
        self, mothur_executable, directory_silva_reference, max_processors=1,
        temp_directory=None, logfile=None, verbose=False, debug=False):
    """
    Constructor

    @param mothur_executable: File path to mothur binary
    @type mothur_executable: str | unicode
    @param directory_silva_reference: Path to directory with SILVA reference database files
    @type directory_silva_reference: str | unicode
    @param max_processors: Maximum number of available processors
    @type max_processors: int | long
    @param temp_directory: Directory for temporary data
    @type temp_directory: str | unicode
    @param logfile: file handler or file path to a log file
    @type logfile: file | FileIO | StringIO | basestring
    @param verbose: Not verbose means that only warnings and errors will be past to stream
    @type verbose: bool
    @param debug: Display debug messages
    @type debug: bool
    """
    assert self.validate_file(mothur_executable, executable=True)
    assert self.validate_dir(directory_silva_reference, file_names=self._silva_ref_files)
    assert self.validate_number(max_processors, minimum=1)
    assert self.validate_dir(temp_directory)
    # NOTE(review): the base class is initialized with debug=False even though a
    # debug flag is accepted; self._debug below carries the caller's value.
    super(MGCluster, self).__init__(logfile=logfile, verbose=verbose, debug=False)
    self._mothur_executable = mothur_executable
    self._tmp_dir = tempfile.mkdtemp(dir=temp_directory)
    self._max_processors = max_processors
    self._debug = debug
    self._ref_silva_distances = self._get_symbolic_link_path(
        os.path.join(directory_silva_reference, "mothur_ref_distances"))
    self._ref_silva_names = self._get_symbolic_link_path(
        os.path.join(directory_silva_reference, "mothur_ref_names"))  # unique
    self._ref_silva_alignment = self._get_symbolic_link_path(
        os.path.join(directory_silva_reference, "mothur_alignment_ref.fasta"))
    # local_distance = os.path.join(self._working_dir, "ref.align.dist")
    self._local_distance = "ref.align.dist"
Example #28
Source File: projectfilefolderhandle_ga.py From CAMISIM with Apache License 2.0 | 5 votes |
def __init__(self, tmp_dir, output_dir, time_stamp=None, logfile=None, verbose=True, debug=False):
    """
    Constructor

    @param tmp_dir: Directory for temporary data
    @type tmp_dir: str | unicode
    @param output_dir: Directory where final data will be placed
    @type output_dir: str | unicode
    @param time_stamp: timestamp as string
    @type time_stamp: str | unicode
    @param logfile: file | FileIO | StringIO | basestring
    @param verbose: Not verbose means that only warnings and errors will be past to stream
    @type verbose: bool
    @param debug: Display debug messages
    @type debug: bool
    """
    assert isinstance(tmp_dir, basestring)
    assert isinstance(output_dir, basestring)
    assert time_stamp is None or isinstance(time_stamp, basestring)
    assert self.validate_dir(tmp_dir)
    # Work inside a private subdirectory of tmp_dir so parallel runs cannot collide.
    self._tmp_dir = tempfile.mkdtemp(dir=tmp_dir)
    self._directory_output = output_dir
    if time_stamp is not None:
        self._time_stamp = time_stamp
    else:
        self._time_stamp = datetime.datetime.fromtimestamp(time.time()).strftime('%Y.%m.%d_%H.%M.%S')
    super(ProjectFileFolderHandle, self).__init__(logfile, verbose, debug)
    self._make_dir(output_dir)
Example #29
Source File: goldstandardassembly.py From CAMISIM with Apache License 2.0 | 5 votes |
def __init__(
        self, file_path_samtools="samtools", max_processes=1, tmp_dir=None,
        logfile=None, verbose=True, debug=False):
    """
    Collection of Methods related to gold standard assemblies

    @param file_path_samtools: path to the samtools executable
    @type file_path_samtools: str | unicode
    @param max_processes: Maximum number of processes used in parallel
    @type max_processes: int | long
    @param tmp_dir: Temp directory for temporary data if needed
    @type tmp_dir: str | unicode
    @param logfile: file handler or file path to a log file
    @type logfile: file | io.FileIO | StringIO.StringIO | basestring
    @param verbose: Not verbose means that only warnings and errors will be past to stream
    @type verbose: bool
    @param debug: Display debug messages
    @type debug: bool

    @return: None
    @rtype: None
    """
    super(GoldStandardAssembly, self).__init__(
        file_path_samtools=file_path_samtools,
        max_processes=max_processes,
        tmp_dir=tmp_dir,
        logfile=logfile,
        verbose=verbose,
        debug=debug
    )
    # Private scratch area for merged bam files, nested under the base tmp dir.
    self._temp_merges_bam_directory = tempfile.mkdtemp(dir=self._tmp_dir)
    self._bamToGold = os.path.join(os.path.dirname(scripts.__file__), "bamToGold.pl")
    assert self.validate_file(self._bamToGold)
Example #30
Source File: test_data.py From mlearn with BSD 3-Clause "New" or "Revised" License | 5 votes |
def setUpClass(cls):
    # Remember where this test module lives, then run everything from a
    # scratch directory so test artifacts don't pollute the source tree.
    cls.this_dir = os.path.dirname(os.path.abspath(__file__))
    cls.test_dir = tempfile.mkdtemp()
    os.chdir(cls.test_dir)