Python shutil.rmtree() Examples
The following are 27 code examples of shutil.rmtree(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module shutil, or try the search function.
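As a quick orientation before the project examples: shutil.rmtree(path) removes an entire directory tree. The minimal sketch below (paths are illustrative) shows the plain call and the ignore_errors flag, both from the standard-library API:

import os
import shutil
import tempfile

# Build a scratch directory with one file in it.
tmp = tempfile.mkdtemp()
open(os.path.join(tmp, "data.txt"), "w").close()

shutil.rmtree(tmp)  # removes the directory and everything beneath it

# A second call would raise FileNotFoundError; ignore_errors suppresses it.
shutil.rmtree(tmp, ignore_errors=True)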

Example #1
Source File: test_gluon_utils.py From dynamic-training-with-apache-mxnet-on-aws with Apache License 2.0 | 7 votes |
def test_multiprocessing_download_successful():
    """ test download with multiprocessing """
    tmp = tempfile.mkdtemp()
    tmpfile = os.path.join(tmp, 'README.md')
    process_list = []
    # test it with 10 processes
    for i in range(10):
        process_list.append(mp.Process(
            target=_download_successful, args=(tmpfile,)))
        process_list[i].start()
    for i in range(10):
        process_list[i].join()
    assert os.path.getsize(tmpfile) > 100, os.path.getsize(tmpfile)
    # check only one file we want left
    pattern = os.path.join(tmp, 'README.md*')
    assert len(glob.glob(pattern)) == 1, glob.glob(pattern)
    # delete temp dir
    shutil.rmtree(tmp)
Example #2
Source File: unique_objects.py From vergeml with MIT License | 6 votes |
def __call__(self, args, env):
    samples_dir = env.get('samples-dir')
    print("Downloading unique objects to {}.".format(samples_dir))
    src_dir = self.download_files([_URL], env=env, dir=env.get('cache-dir'))
    path = os.path.join(src_dir, "ObjectsAll.zip")
    zipf = zipfile.ZipFile(path, 'r')
    zipf.extractall(src_dir)
    zipf.close()
    for file in os.listdir(os.path.join(src_dir, "OBJECTSALL")):
        shutil.copy(os.path.join(src_dir, "OBJECTSALL", file), samples_dir)
    shutil.rmtree(src_dir)
    print("Finished downloading unique objects.")
Example #3
Source File: cats_and_dogs.py From vergeml with MIT License | 6 votes |
def __call__(self, args, env):
    samples_dir = env.get('samples-dir')
    for label in ("cat", "dog"):
        dest = os.path.join(samples_dir, label)
        if os.path.exists(dest):
            raise VergeMLError("Directory {} already exists in samples dir: {}".format(label, dest))
    print("Downloading cats and dogs to {}.".format(samples_dir))
    src_dir = self.download_files([(_URL, "catsdogs.zip")], env)
    path = os.path.join(src_dir, "catsdogs.zip")
    print("Extracting data.")
    zipf = zipfile.ZipFile(path, 'r')
    zipf.extractall(src_dir)
    zipf.close()
    for file, dest in (("PetImages/Dog", "dog"), ("PetImages/Cat", "cat")):
        shutil.copytree(os.path.join(src_dir, file), os.path.join(samples_dir, dest))
    shutil.rmtree(src_dir)
    # drop two images from the dataset that are known to be corrupt
    os.unlink(os.path.join(samples_dir, "cat", "666.jpg"))
    os.unlink(os.path.join(samples_dir, "dog", "11702.jpg"))
    print("Finished downloading cats and dogs.")
Example #4
Source File: __init__.py From ALF with Apache License 2.0 | 6 votes |
def do_iteration(self, mutation_fn, aggression):
    """
    This method is called with an output mutation filename and a real
    aggression (see :ref:`aggression`) indicating the amount of
    aggression the fuzzing algorithm should use. *mutation_fn* is
    unique for every invocation of :meth:`do_iteration`.

    It is an error for this method not to write the mutated template
    to *mutation_fn* before returning a result. If a result is not
    found, the mutation filename may be written to if needed, but it
    is not required.

    If a notable result is found, it should be returned as a
    :class:`FuzzResult` instance. This will be stored and reported to
    the ALF central server at the next check-in interval. A return of
    None indicates a result was not found.

    The filenames of any temporary files or folders created during
    execution can be safely removed using :func:`alf.delete`. This is
    safer than using :func:`os.remove` or :func:`shutil.rmtree`
    directly. *mutation_fn* does not need to be deleted, it is cleaned
    up automatically.
    """
    raise NotImplementedError()
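The docstring above recommends a project-specific wrapper (alf.delete) over calling os.remove or shutil.rmtree directly. One common reason raw deletion fails in practice is read-only entries, especially on Windows. Below is a minimal sketch of such a tolerant helper using rmtree's onerror callback (superseded by onexc on Python 3.12+); the name safe_delete and its behaviour are assumptions for illustration, not ALF's actual implementation:

import os
import shutil
import stat

def _clear_readonly_and_retry(func, path, exc_info):
    # onerror callback for shutil.rmtree: clear the read-only bit,
    # then retry the failed operation (func is os.unlink, os.rmdir, ...).
    os.chmod(path, stat.S_IWRITE)
    func(path)

def safe_delete(path):
    """Best-effort removal of a file or directory tree (hypothetical helper)."""
    if os.path.isdir(path):
        shutil.rmtree(path, onerror=_clear_readonly_and_retry)
    elif os.path.exists(path):
        os.chmod(path, stat.S_IWRITE)
        os.remove(path)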
Example #5
Source File: workflow.py From wechat-alfred-workflow with MIT License | 6 votes |
def _delete_directory_contents(self, dirpath, filter_func):
    """Delete all files in a directory.

    :param dirpath: path to directory to clear
    :type dirpath: ``unicode`` or ``str``
    :param filter_func: function to determine whether a file shall be
        deleted or not.
    :type filter_func: ``callable``

    """
    if os.path.exists(dirpath):
        for filename in os.listdir(dirpath):
            if not filter_func(filename):
                continue
            path = os.path.join(dirpath, filename)
            if os.path.isdir(path):
                shutil.rmtree(path)
            else:
                os.unlink(path)
            self.logger.debug('deleted : %r', path)
Example #6
Source File: _7zip.py From multibootusb with GNU General Public License v2.0 | 6 votes |
def test_extraction():
    import shutil
    src = 'c:/Users/shinj/Downloads/clonezilla-live-2.5.2-31-amd64.iso'
    tmp_dir = 'c:/Users/shinj/Documents/tmp'
    for subdir, pattern in [
            ('single_string', 'EFI/'),
            ('single_list', ['EFI/']),
            ('multi', ['EFI/', 'syslinux/']),
            ('all', None)]:
        dest_dir = os.path.join(tmp_dir, subdir)
        if os.path.exists(dest_dir):
            shutil.rmtree(dest_dir)
        os.mkdir(dest_dir)
        args = [src, dest_dir]
        if pattern is not None:
            args.append(pattern)
        print('Calling extract_iso(%s)' % args)
        extract_iso(*args)
Example #7
Source File: mainHelp.py From pytorch_NER_BiLSTM_CNN_CRF with Apache License 2.0 | 6 votes |
def save_dictionary(config):
    """
    :param config: config
    :return:
    """
    if config.save_dict is True:
        if os.path.exists(config.dict_directory):
            shutil.rmtree(config.dict_directory)
        if not os.path.isdir(config.dict_directory):
            os.makedirs(config.dict_directory)
        config.word_dict_path = "/".join([config.dict_directory, config.word_dict])
        config.label_dict_path = "/".join([config.dict_directory, config.label_dict])
        print("word_dict_path : {}".format(config.word_dict_path))
        print("label_dict_path : {}".format(config.label_dict_path))
        save_dict2file(config.create_alphabet.word_alphabet.words2id, config.word_dict_path)
        save_dict2file(config.create_alphabet.label_alphabet.words2id, config.label_dict_path)
        # copy the dictionary files to the save directory
        print("copy dictionary to {}".format(config.save_dir))
        shutil.copytree(config.dict_directory, "/".join([config.save_dir, config.dict_directory]))


# load data / create alphabet / create iterator
Example #8
Source File: utils_test.py From neural-pipeline with MIT License | 6 votes |
def test_creation(self):
    if os.path.exists(self.base_dir):
        shutil.rmtree(self.checkpoints_dir, ignore_errors=True)

    try:
        FileStructManager(base_dir=self.base_dir, is_continue=False)
    except FileStructManager.FSMException as err:
        self.fail("Raise error when base directory exists: [{}]".format(err))

    self.assertFalse(os.path.exists(self.base_dir))

    try:
        FileStructManager(base_dir=self.base_dir, is_continue=False)
    except FileStructManager.FSMException as err:
        self.fail("Raise error when base directory exists but empty: [{}]".format(err))

    os.makedirs(os.path.join(self.base_dir, 'new_dir'))
    try:
        FileStructManager(base_dir=self.base_dir, is_continue=False)
    except:
        self.fail("Error initialize when exists non-registered folders in base directory")

    shutil.rmtree(self.base_dir, ignore_errors=True)
Example #9
Source File: straight_dope_test_utils.py From dynamic-training-with-apache-mxnet-on-aws with Apache License 2.0 | 6 votes |
def _download_straight_dope_notebooks():
    """Downloads the Straight Dope Notebooks.

    Returns:
        True if it succeeds in downloading the notebooks without error.
    """
    logging.info('Cleaning and setting up notebooks directory "{}"'.format(NOTEBOOKS_DIR))
    shutil.rmtree(NOTEBOOKS_DIR, ignore_errors=True)

    cmd = [GIT_PATH, 'clone', GIT_REPO, NOTEBOOKS_DIR]

    proc, msg = _run_command(cmd)

    if proc.returncode != 0:
        err_msg = 'Error downloading Straight Dope notebooks.\n'
        err_msg += msg
        logging.error(err_msg)
        return False
    return True
Example #10
Source File: dotplots.py From svviz with MIT License | 5 votes |
def dotplot2(s1, s2, wordsize=5, overlap=5, verbose=1):
    """
    verbose = 0 (no progress), 1 (progress if s1 and s2 are long) or
    2 (progress in any case)
    """
    doProgress = False
    if verbose > 1 or len(s1)*len(s2) > 1e6:
        doProgress = True

    # Integer division (//) keeps the matrix shape and indices integral;
    # the original Python 2 code used plain /.
    mat = numpy.ones(((len(s1)-wordsize)//overlap+2,
                      (len(s2)-wordsize)//overlap+2))

    for i in range(0, len(s1)-wordsize, overlap):
        if i % 1000 == 0 and doProgress:
            logging.info(" dotplot progress: {} of {} rows done".format(i, len(s1)-wordsize))
        word1 = s1[i:i+wordsize]

        for j in range(0, len(s2)-wordsize, overlap):
            word2 = s2[j:j+wordsize]
            if word1 == word2 or word1 == word2[::-1]:
                mat[i//overlap, j//overlap] = 0

    imgData = None
    tempDir = tempfile.mkdtemp()
    try:
        path = os.path.join(tempDir, "dotplot.png")
        misc.imsave(path, mat)
        imgData = open(path, "rb").read()  # binary read; the original opened in text mode
    except Exception as e:
        logging.error("Error generating dotplots:'{}'".format(e))
    finally:
        shutil.rmtree(tempDir)
    return imgData
Example #11
Source File: collector.py From incubator-spot with Apache License 2.0 | 5 votes |
def __del__(self):
    '''
        Called when the instance is about to be destroyed.
    '''
    if hasattr(self, '_tmpdir'):
        self._logger.info('Clean up temporary directory "{0}".'.format(self._tmpdir))
        shutil.rmtree(self._tmpdir)
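Relying on __del__ for cleanup, as above, is fragile: the finalizer runs at an unpredictable time and may not run at all during interpreter shutdown. Where the surrounding code permits, tempfile.TemporaryDirectory (Python 3.2+), which calls shutil.rmtree for you, is a more deterministic alternative; a minimal sketch:

import tempfile

# The directory and its contents are removed when the with-block exits,
# even if an exception is raised inside it.
with tempfile.TemporaryDirectory() as tmpdir:
    print("working in", tmpdir)
# tmpdir has been deleted at this point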
Example #12
Source File: download.py From gog-galaxy-plugin-downloader with GNU General Public License v3.0 | 5 votes |
def fix_plugin_directories(dest):
    """
    Loops through all folders in the output directory, reads their
    manifest file, and renames the directory to the standard
    <platform>_<guid> format
    """
    # Loop through directories in the destination directory
    for existing_dir in os.listdir(dest):
        existing_path = os.path.join(dest, existing_dir)

        # Skip non-directories
        if not os.path.isdir(existing_path):
            continue

        try:
            # The context manager closes the json file on exit
            with open(os.path.join(existing_path, 'manifest.json')) as m:
                data = json.load(m)
                platform = data['platform']
                guid = data['guid']

            expected_dir = platform + '_' + guid
            expected_path = os.path.join(dest, expected_dir)
            if existing_path != expected_path:
                print('NOTICE: Folder should be "{}", but it is named "{}"'
                      .format(expected_dir, existing_dir))

                if os.path.isdir(expected_path):
                    print('NOTICE: Correct pathed plugin already exists,' +
                          ' deleting extra plugin')
                    shutil.rmtree(existing_path)
                else:
                    print('NOTICE: Renaming folder to proper name')
                    shutil.move(existing_path, expected_path)
        except (FileNotFoundError, json.decoder.JSONDecodeError, KeyError):
            print('ERROR: Could not read plugin data from {} folder'
                  .format(existing_path))
Example #13
Source File: download.py From gog-galaxy-plugin-downloader with GNU General Public License v3.0 | 5 votes |
def delete_old_plugins(data, dest):
    """
    Deletes versions of plugins that don't match the yaml manifest.
    In theory this should only be older versions, but any version
    that doesn't match the yaml definition will be deleted.

    This explicitly does not touch other directories that do not
    match the known plugin names.

    If the version doesn't match the yaml definition, the directory
    is removed.
    """
    # Loop over each plugin
    for name, data in data.items():
        expected_plugin_dir = name + '_' + data['guid']

        # Loop through directories in the destination directory
        for item in os.listdir(dest):
            full_path = os.path.join(dest, item)

            # Skip non-directories
            if not os.path.isdir(full_path):
                continue

            # Skip directory names that are in the valid plugin directory array
            if item == expected_plugin_dir:
                continue

            # If any other directory begins with <plugin_name>_, delete it
            if item.startswith(name + '_'):
                print('Deleting wrong version "{}" from "{}"'
                      .format(item, dest))
                shutil.rmtree(full_path)
Example #14
Source File: cifar_100.py From vergeml with MIT License | 5 votes |
def download(env): """60000 tiny colour images in 100 classes. The CIFAR-100 dataset consists of 60000 32x32 colour images in 100 classes, with 6000 images per class. There are 500 training images and 100 testing images per class. Credits: Alex Krizhevsky Vinod Nair Geoffrey Hinton For more information visit: https://www.cs.toronto.edu/~kriz/cifar.html""" url = "https://www.cs.toronto.edu/~kriz/cifar-100-python.tar.gz" samples_dir = env.get('base.samples_dir') print("Downloading CIFAR-100 to {}.".format(samples_dir)) src_dir = download_files([url], dir=env.get('base.cache_dir')) path = os.path.join(src_dir, "cifar-100-python.tar.gz") tarf = tarfile.TarFile(path, 'r:gz') tarf.extractall(src_dir) tarf.close() shutil.rmtree(src_dir) print("Finished downloading CIFAR-100.")
Example #15
Source File: cifar_10.py From vergeml with MIT License | 5 votes |
def download(env): """60000 tiny colour images in 10 classes. The CIFAR-10 dataset consists of 60000 32x32 colour images in 10 classes, with 6000 images per class. There are 50000 training images and 10000 test images. Credits: Alex Krizhevsky Vinod Nair Geoffrey Hinton For more information visit: https://www.cs.toronto.edu/~kriz/cifar.html""" url = "https://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz" samples_dir = env.get('base.samples_dir') print("Downloading CIFAR-10 to {}.".format(samples_dir)) src_dir = download_files([url], dir=env.get('base.cache_dir')) path = os.path.join(src_dir, "cifar-10-python.tar.gz") tarf = tarfile.TarFile(path, 'r:gz') tarf.extractall(src_dir) tarf.close() shutil.rmtree(src_dir) print("Finished downloading CIFAR-10.")
Example #16
Source File: dogs.py From vergeml with MIT License | 5 votes |
def __call__(self, args, env):
    samples_dir = env.get('samples-dir')
    labels = [
        'affenpinscher', 'afghan-hound', 'african-hunting-dog', 'airedale',
        'american-staffordshire-terrier', 'appenzeller', 'australian-terrier',
        'basenji', 'basset', 'beagle', 'bedlington-terrier',
        'bernese-mountain-dog', 'black-and-tan-coonhound', 'blenheim-spaniel',
        'bloodhound', 'bluetick', 'border-collie', 'border-terrier', 'borzoi',
        'boston-bull', 'bouvier-des-flandres', 'boxer', 'brabancon-griffon',
        'briard', 'brittany-spaniel', 'bull-mastiff', 'cairn', 'cardigan',
        'chesapeake-bay-retriever', 'chihuahua', 'chow', 'clumber',
        'cocker-spaniel', 'collie', 'curly-coated-retriever', 'dandie-dinmont',
        'dhole', 'dingo', 'doberman', 'english-foxhound', 'english-setter',
        'english-springer', 'entlebucher', 'eskimo-dog',
        'flat-coated-retriever', 'french-bulldog', 'german-shepherd',
        'german-short-haired-pointer', 'giant-schnauzer', 'golden-retriever',
        'gordon-setter', 'great-dane', 'great-pyrenees',
        'greater-swiss-mountain-dog', 'groenendael', 'ibizan-hound',
        'irish-setter', 'irish-terrier', 'irish-water-spaniel',
        'irish-wolfhound', 'italian-greyhound', 'japanese-spaniel', 'keeshond',
        'kelpie', 'kerry-blue-terrier', 'komondor', 'kuvasz',
        'labrador-retriever', 'lakeland-terrier', 'leonberg', 'lhasa',
        'malamute', 'malinois', 'maltese-dog', 'mexican-hairless',
        'miniature-pinscher', 'miniature-poodle', 'miniature-schnauzer',
        'newfoundland', 'norfolk-terrier', 'norwegian-elkhound',
        'norwich-terrier', 'old-english-sheepdog', 'otterhound', 'papillon',
        'pekinese', 'pembroke', 'pomeranian', 'pug', 'redbone',
        'rhodesian-ridgeback', 'rottweiler', 'saint-bernard', 'saluki',
        'samoyed', 'schipperke', 'scotch-terrier', 'scottish-deerhound',
        'sealyham-terrier', 'shetland-sheepdog', 'shih-tzu', 'siberian-husky',
        'silky-terrier', 'soft-coated-wheaten-terrier',
        'staffordshire-bullterrier', 'standard-poodle', 'standard-schnauzer',
        'sussex-spaniel', 'tibetan-mastiff', 'tibetan-terrier', 'toy-poodle',
        'toy-terrier', 'vizsla', 'walker-hound', 'weimaraner',
        'welsh-springer-spaniel', 'west-highland-white-terrier', 'whippet',
        'wire-haired-fox-terrier', 'yorkshire-terrier']
    for label in labels:
        dest = os.path.join(samples_dir, label)
        if os.path.exists(dest):
            raise VergeMLError("Directory {} already exists in samples dir: {}".format(label, dest))
    print("Downloading Stanford dogs to {}.".format(samples_dir))
    src_dir = self.download_files([_URL], env=env, dir=env.get('cache-dir'))
    path = os.path.join(src_dir, "images.tar")
    print("Extracting data...")
    tarf = tarfile.TarFile(path, 'r')
    tarf.extractall(src_dir)
    tarf.close()
    for src_file in os.listdir(os.path.join(src_dir, "Images")):
        if src_file.startswith("."):
            continue
        _, dest_file = src_file.split("-", maxsplit=1)
        dest_file = dest_file.replace("_", "-")
        dest_file = dest_file.lower()
        shutil.copytree(os.path.join(src_dir, "Images", src_file),
                        os.path.join(samples_dir, dest_file))
    shutil.rmtree(src_dir)
    print("Finished downloading Stanford dogs.")
Example #17
Source File: svhn.py From vergeml with MIT License | 5 votes |
def download(env): """The Street View House Numbers (SVHN) Dataset. SVHN is a real-world image dataset for developing machine learning and object recognition algorithms with minimal requirement on data preprocessing and formatting. It can be seen as similar in flavor to MNIST (e.g., the images are of small cropped digits), but incorporates an order of magnitude more labeled data (over 600,000 digit images) and comes from a significantly harder, unsolved, real world problem (recognizing digits and numbers in natural scene images). SVHN is obtained from house numbers in Google Street View images. Authors: Yuval Netzer, Tao Wang, Adam Coates, Alessandro Bissacco, Bo Wu, Andrew Y. Ng Reading Digits in Natural Images with Unsupervised Feature Learning NIPS Workshop on Deep Learning and Unsupervised Feature Learning 2011. http://ufldl.stanford.edu/housenumbers/nips2011_housenumbers.pdf For more information visit: http://ufldl.stanford.edu/housenumbers/""" urls = ["http://ufldl.stanford.edu/housenumbers/train_32x32.mat", "http://ufldl.stanford.edu/housenumbers/test_32x32.mat"] samples_dir = env.get('base.samples_dir') print("Downloading SVHN to {}.".format(samples_dir)) src_dir = download_files(urls, dir=env.get('base.cache_dir')) for file in ("train_32x32.mat", "test_32x32.mat", ): shutil.copy(os.path.join(src_dir, file), samples_dir) shutil.rmtree(src_dir) print("Finished downloading SVHN.")
Example #18
Source File: mnist.py From vergeml with MIT License | 5 votes |
def download(env): """The MNIST database of handwritten digits. The MNIST database of handwritten digits has a training set of 60,000 examples, and a test set of 10,000 examples. It is a subset of a larger set available from NIST. The digits have been size-normalized and centered in a fixed-size image. It is a good database for people who want to try learning techniques and pattern recognition methods on real-world data while spending minimal efforts on preprocessing and formatting. Authors: Yann LeCun, Courant Institute, NYU Corinna Cortes, Google Labs, New York Christopher J.C. Burges, Microsoft Research, Redmond For more information visit:http://yann.lecun.com/exdb/mnist/""" urls = ["http://yann.lecun.com/exdb/mnist/train-images-idx3-ubyte.gz", "http://yann.lecun.com/exdb/mnist/train-labels-idx1-ubyte.gz", "http://yann.lecun.com/exdb/mnist/t10k-images-idx3-ubyte.gz", "http://yann.lecun.com/exdb/mnist/t10k-labels-idx1-ubyte.gz"] samples_dir = env.get('base.samples_dir') print("Downloading mnist to {}.".format(samples_dir)) src_dir = download_files(urls, dir=env.get('base.cache_dir')) for file in ("train-images-idx3-ubyte.gz", "train-labels-idx1-ubyte.gz", "t10k-images-idx3-ubyte.gz", "t10k-labels-idx1-ubyte.gz"): shutil.copy(os.path.join(src_dir, file), samples_dir) shutil.rmtree(src_dir) print("Finished downloading mnist.")
Example #19
Source File: options.py From arm_now with MIT License | 5 votes |
def clean(config):
    """ Clean the filesystem. """
    os.unlink(config.KERNEL)
    os.unlink(config.DTB)
    os.unlink(config.ROOTFS)
    shutil.rmtree(config.DIR, ignore_errors=True)
Example #20
Source File: test_calc_basic.py From aospy with Apache License 2.0 | 5 votes |
def _clean_test_direcs():
    for direc in [example_proj.direc_out, example_proj.tar_direc_out]:
        try:
            shutil.rmtree(direc)
        except OSError:
            pass
Example #21
Source File: test_automate.py From aospy with Apache License 2.0 | 5 votes |
def calcsuite_init_specs_single_calc(calcsuite_init_specs):
    specs = calcsuite_init_specs.copy()
    specs['variables'] = [condensation_rain]
    specs['regions'] = [None]
    specs['output_time_regional_reductions'] = ['av']
    yield specs
    # Teardown procedure
    for direc in [example_proj.direc_out, example_proj.tar_direc_out]:
        shutil.rmtree(direc, ignore_errors=True)
Example #22
Source File: test_automate.py From aospy with Apache License 2.0 | 5 votes |
def calcsuite_init_specs_two_calcs(calcsuite_init_specs):
    specs = calcsuite_init_specs.copy()
    specs['variables'] = [condensation_rain, convection_rain]
    specs['regions'] = [None]
    specs['output_time_regional_reductions'] = ['av']
    yield specs
    # Teardown procedure
    for direc in [example_proj.direc_out, example_proj.tar_direc_out]:
        shutil.rmtree(direc, ignore_errors=True)
Example #23
Source File: bindiff.py From BASS with GNU General Public License v2.0 | 5 votes |
def bindiff_pickle_export(self, sample, is_64_bit=True, timeout=None):
    """
    Load a sample into IDA Pro, perform autoanalysis and export a
    pickle file.
    :param sample: The sample's path
    :param is_64_bit: If the sample needs to be analyzed by the 64 bit
        version of IDA
    :param timeout: Timeout for the analysis in seconds
    :return: The file name of the exported pickle database. The file
        needs to be deleted by the caller. Returns None on error.
    """
    data_to_send = {
        "timeout": timeout,
        "is_64_bit": is_64_bit}
    url = "%s/binexport_pickle" % next(self._urls)
    log.debug("curl -XPOST --data '%s' '%s'", json.dumps(data_to_send), url)
    response = requests.post(url, data=data_to_send,
                             files={os.path.basename(sample): open(sample, "rb")})
    if response.status_code == 200:
        handle_tar, path_tar = tempfile.mkstemp(suffix=".tar.gz")
        with os.fdopen(handle_tar, "wb") as f:
            # explicit loop: on Python 3, map() is lazy and would write nothing
            for chunk in response.iter_content(1024):
                f.write(chunk)
        directory = tempfile.mkdtemp()
        subprocess.check_call(["tar", "xf", path_tar], cwd=directory)

        handle_bindiff, output_bindiff = tempfile.mkstemp(suffix=".BinExport")
        with os.fdopen(handle_bindiff, "wb") as f:
            with open(os.path.join(directory, "output.BinExport"), "rb") as f2:
                shutil.copyfileobj(f2, f)
        handle_pickle, output_pickle = tempfile.mkstemp(suffix=".pickle")
        with os.fdopen(handle_pickle, "wb") as f:
            with open(os.path.join(directory, "output.pickle"), "rb") as f2:
                shutil.copyfileobj(f2, f)

        os.unlink(path_tar)
        shutil.rmtree(directory)
        return output_bindiff, output_pickle
    else:
        log.error("Bindiff server responded with status code %d: %s",
                  response.status_code, response.content)
        return None
Example #24
Source File: ida_service.py From BASS with GNU General Public License v2.0 | 5 votes |
def bindiff_export():
    """
    Run the IDA Pro autoanalysis on the input file and export a
    BinExport database.
    :param input: The input file
    :return: Status code 200 and a JSON object containing the output
        database name in key 'output', or status code 422 on invalid
        parameters, 408 on timeout or 500 on other errors.
    """
    logger.info("bindiff_export called")
    directory = None
    try:
        directory = tempfile.mkdtemp()
        if len(request.files) != 1:
            return make_response(jsonify(error="Missing file parameter"), 422)
        # Python 2 idiom; on Python 3 use next(iter(request.files.items()))
        filename, file_ = request.files.items()[0]
        input_ = os.path.join(directory, sanitize_filename(filename))
        file_.save(input_)

        output = os.path.join(directory, "output.BinExport")
        timeout = request.form.get('timeout', None)
        is_64_bit = request.form.get('is_64_bit', True)
        try:
            run_ida(input_, is_64_bit, timeout,
                    os.path.join(PREFIX, "export_binexport_pickle.py"),
                    "binexport", output)
            logger.info("Command completed successfully")
            return send_file(open(output, "rb"), as_attachment=True,
                             attachment_filename="%s.BinExport" % filename,
                             mimetype="application/binary")
        except TimeoutError:
            return jsonify(error="Program execution timed out"), 408
        except OSError as err:
            return jsonify(error="Program execution failed with error %d" % err.errno), 500
    finally:
        if directory is not None:
            shutil.rmtree(directory)
Example #25
Source File: ida_service.py From BASS with GNU General Public License v2.0 | 5 votes |
def bindiff_pickle_export():
    """
    Run the IDA Pro autoanalysis on the input file and export a
    BinExport database.
    :param input: The input file
    :return: Status code 200 and a JSON object containing the output
        database name in key 'output', or status code 422 on invalid
        parameters, 408 on timeout or 500 on other errors.
    """
    logger.info("bindiff_pickle_export called")
    directory = None
    try:
        directory = tempfile.mkdtemp()
        if len(request.files) != 1:
            return make_response(jsonify(error="Missing file parameter"), 422)
        # Python 2 idiom; on Python 3 use next(iter(request.files.items()))
        filename, file_ = request.files.items()[0]
        input_ = os.path.join(directory, sanitize_filename(filename))
        file_.save(input_)

        output_binexport = os.path.join(directory, "output.BinExport")
        output_pickle = os.path.join(directory, "output.pickle")
        timeout = request.form.get('timeout', None)
        is_64_bit = request.form.get('is_64_bit', True)
        try:
            run_ida(input_, is_64_bit, timeout,
                    os.path.join(PREFIX, "export_binexport_pickle.py"),
                    "binexport_pickle", output_binexport, output_pickle)
            logger.info("Command completed successfully")
            output_tar = os.path.join(directory, "output.tar.gz")
            subprocess.check_call(["tar", "czf", output_tar,
                                   os.path.relpath(output_binexport, directory),
                                   os.path.relpath(output_pickle, directory)],
                                  cwd=directory)
            return send_file(open(output_tar, "rb"), as_attachment=True,
                             attachment_filename="%s.tar.gz" % filename,
                             mimetype="application/gzip")
        except TimeoutError:
            return jsonify(error="Program execution timed out"), 408
        except OSError as err:
            return jsonify(error="Program execution failed with error %d" % err.errno), 500
    finally:
        if directory is not None:
            shutil.rmtree(directory)
Example #26
Source File: ida_service.py From BASS with GNU General Public License v2.0 | 5 votes |
def pickle_export():
    """
    Run the IDA Pro autoanalysis on the input file and export a
    pickle database.
    :param input: The input file
    :return: Status code 200 and a JSON object containing the output
        database name in key 'output', or status code 422 on invalid
        parameters, 408 on timeout or 500 on other errors.
    """
    logger.info("pickle_export called")  # the original logged "bindiff_export called"
    directory = None
    try:
        directory = tempfile.mkdtemp()
        if len(request.files) != 1:
            return make_response(jsonify(error="Missing file parameter"), 422)
        # Python 2 idiom; on Python 3 use next(iter(request.files.items()))
        filename, file_ = request.files.items()[0]
        input_ = os.path.join(directory, sanitize_filename(filename))
        file_.save(input_)

        output = os.path.join(directory, "output.pickle")
        timeout = request.form.get('timeout', None)
        is_64_bit = request.form.get('is_64_bit', False)
        try:
            run_ida(input_, is_64_bit, timeout,
                    os.path.join(PREFIX, "export_binexport_pickle.py"),
                    "pickle", output)
            logger.info("Command completed successfully")
            return send_file(open(output, "rb"), as_attachment=True,
                             attachment_filename="%s.pickle" % filename,
                             mimetype="application/binary")
        except TimeoutError:
            return jsonify(error="Program execution timed out"), 408
        except OSError as err:
            return jsonify(error="Program execution failed with error %d" % err.errno), 500
    finally:
        if directory is not None:
            shutil.rmtree(directory)
Example #27
Source File: ida_service.py From BASS with GNU General Public License v2.0 | 5 votes |
def bindiff_compare():
    logger.info("bindiff_compare called")

    input_dir = tempfile.mkdtemp()
    output_dir = tempfile.mkdtemp()
    try:
        primary = os.path.join(input_dir, "primary")
        secondary = os.path.join(input_dir, "secondary")
        try:
            request.files["primary"].save(primary)
            request.files["secondary"].save(secondary)
        except KeyError:
            return make_response(jsonify(error="Missing parameter 'primary' or 'secondary'"), 422)

        timeout = request.form.get('timeout', None)

        cmd = (BINDIFF_DIFFER, "--primary", primary, "--secondary", secondary,
               "--output_dir", output_dir)
        logger.info("Executing %s", " ".join("'%s'" % x for x in cmd))
        check_call(cmd, cwd=output_dir, timeout=timeout)

        db_path = [os.path.join(output_dir, x) for x in os.listdir(output_dir)]
        if len(db_path) != 1:
            return make_response(jsonify(error="BinDiff generated 0 or several output files"), 500)
        return send_file(open(db_path[0], "rb"), as_attachment=True,
                         attachment_filename="BinDiff.sqlite3",
                         mimetype="application/binary")
    except OSError as err:
        if err.errno == -9:
            return make_response(jsonify(error="Program execution timed out"), 408)
        else:
            return make_response(jsonify(error="Program execution failed with error %d" % err.errno), 500)
    finally:
        shutil.rmtree(input_dir)
        shutil.rmtree(output_dir)