Python os.path.getsize() Examples

The following are 40 code examples showing how to use os.path.getsize(). The examples are extracted from open source projects; each one lists the project, author, source file, and license it was taken from.

You may also want to check out all available functions and classes of the os.path module.
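For quick reference, os.path.getsize(path) returns the size of the given path in bytes and raises OSError if the path does not exist or is inaccessible. Below is a minimal standalone sketch; the file name is purely hypothetical.

import os.path

target = "example.dat"  # hypothetical file name, used only for illustration

try:
    size_bytes = os.path.getsize(target)  # size in bytes; follows symlinks
    print("{} is {} bytes ({:.1f} KiB)".format(target, size_bytes, size_bytes / 1024))
except OSError as err:  # missing file, broken symlink, permission error, ...
    print("Could not determine size of {}: {}".format(target, err))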

Example 1
Project: SecPi   Author: SecPi   File: alarmdata.py    License: GNU General Public License v3.0
def list(self):
		dirs = []
		# TODO: error management
		for d in listdir(self.datapath):
			dp = path.join(self.datapath, d)
			if path.isdir(dp):
				dirs.append({
					"name": d,
					"path": dp,
					"mtime": datetime.datetime.fromtimestamp(path.getmtime(dp)).strftime('%d.%m.%Y %H:%M:%S')
					# "size": path.getsize(dp),
					# "hsize": self.human_size(self.get_size(dp))
				})
		
		dirs.sort(key=lambda dir: dir['name'], reverse=True)
		
		return {'status': 'success', 'data': dirs} 
Example 2
Project: End-to-end-ASR-Pytorch   Author: Alexander-H-Liu   File: librispeech.py    License: MIT License
def __init__(self, path, split, tokenizer, bucket_size, ascending=False):
        # Setup
        self.path = path
        self.bucket_size = bucket_size

        # List all wave files
        file_list = []
        for s in split:
            split_list = list(Path(join(path, s)).rglob("*.flac"))
            assert len(split_list) > 0, "No data found @ {}".format(join(path,s))
            file_list += split_list
        # Read text
        text = Parallel(n_jobs=READ_FILE_THREADS)(
            delayed(read_text)(str(f)) for f in file_list)
        #text = Parallel(n_jobs=-1)(delayed(tokenizer.encode)(txt) for txt in text)
        text = [tokenizer.encode(txt) for txt in text]

        # Sort dataset by text length
        #file_len = Parallel(n_jobs=READ_FILE_THREADS)(delayed(getsize)(f) for f in file_list)
        self.file_list, self.text = zip(*[(f_name, txt)
                                          for f_name, txt in sorted(zip(file_list, text), reverse=not ascending, key=lambda x:len(x[1]))]) 
Example 3
Project: web2board   Author: bq   File: downloader.py    License: GNU Lesser General Public License v3.0
def verify(self, sha1=None):
        _dlsize = getsize(self._destination)
        if _dlsize != self.get_size():
            raise FDSizeMismatch(_dlsize, self._fname, self.get_size())

        if not sha1:
            return

        dlsha1 = None
        try:
            result = util.exec_command(["sha1sum", self._destination])
            dlsha1 = result['out']
        except OSError:
            try:
                result = util.exec_command(
                    ["shasum", "-a", "1", self._destination])
                dlsha1 = result['out']
            except OSError:
                pass

        if dlsha1:
            dlsha1 = dlsha1[1:41] if dlsha1.startswith("\\") else dlsha1[:40]
            if sha1 != dlsha1:
                raise FDSHASumMismatch(dlsha1, self._fname, sha1) 
Example 4
Project: GutterColor   Author: ggordan   File: gutter_color.py    License: MIT License
def clear_cache(force = False):
  """
  If the folder exists, and has more than 5MB of icons in the cache, delete
  it to clear all the icons then recreate it.
  """
  from os.path import getsize, join, isfile, exists
  from os import makedirs, listdir
  from sublime import cache_path
  from shutil import rmtree

  # The icon cache path
  icon_path = join(cache_path(), "GutterColor")

  # The maximum amount of space to take up
  limit = 5242880 # 5 MB

  if exists(icon_path):
    size = sum(getsize(join(icon_path, f)) for f in listdir(icon_path) if isfile(join(icon_path, f)))
    if force or (size > limit): rmtree(icon_path)

  if not exists(icon_path): makedirs(icon_path) 
Example 5
Project: seisflows   Author: rmodrak   File: fortran_binary.py    License: BSD 2-Clause "Simplified" License
def _read(filename):
    """ Reads Fortran style binary data into numpy array
    """
    nbytes = getsize(filename)
    with open(filename, 'rb') as file:
        # read size of record
        file.seek(0)
        n = np.fromfile(file, dtype='int32', count=1)[0]
        if n == nbytes-8:
            file.seek(4)
            data = np.fromfile(file, dtype='float32')
            return data[:-1]
        else:
            file.seek(0)
            data = np.fromfile(file, dtype='float32')
            return data 
Example 6
Project: airflow   Author: apache   File: dynamodb_to_s3.py    License: Apache License 2.0
def _scan_dynamodb_and_upload_to_s3(self, temp_file, scan_kwargs, table):
        while True:
            response = table.scan(**scan_kwargs)
            items = response['Items']
            for item in items:
                temp_file.write(self.process_func(item))

            if 'LastEvaluatedKey' not in response:
                # no more items to scan
                break

            last_evaluated_key = response['LastEvaluatedKey']
            scan_kwargs['ExclusiveStartKey'] = last_evaluated_key

            # Upload the file to S3 if reach file size limit
            if getsize(temp_file.name) >= self.file_size:
                _upload_file_to_s3(temp_file, self.s3_bucket_name,
                                   self.s3_key_prefix)
                temp_file.close()
                temp_file = NamedTemporaryFile()
        return temp_file 
Example 7
Project: ssbio   Author: SBRG   File: test_protein_seqprop.py    License: MIT License
def test_write_gff_file(self, seqprop_with_i, tmpdir):
        """Test writing the features, and that features are now loaded from a file"""
        outpath = tmpdir.join('test_seqprop_with_i_write_gff_file.gff').strpath
        seqprop_with_i.write_gff_file(outfile=outpath, force_rerun=True)

        # Test that the file was written
        assert op.exists(outpath)
        assert op.getsize(outpath) > 0

        # Test that file paths are correct
        assert seqprop_with_i.feature_path == outpath
        assert seqprop_with_i.feature_file == 'test_seqprop_with_i_write_gff_file.gff'
        assert seqprop_with_i.feature_dir == tmpdir

        # Test that features cannot be changed
        with pytest.raises(ValueError):
            seqprop_with_i.features = ['NOFEATURES'] 
Example 8
Project: hacker-scripts   Author: restran   File: dir_compare.py    License: MIT License
def traverse_dir(path):
    file_dict = {}
    dir_dict = {}
    count = 1
    for root, dirs, files in walk(path):
        for d in dirs:
            abs_p = join(root, d)
            dir_dict[abs_p] = 0
            print(abs_p)
            count += 1
            if count % 200 == 0:
                print('%s files scanned' % count)

        for f in files:
            abs_p = join(root, f)
            file_dict[abs_p] = getsize(abs_p)
            print(abs_p)
            count += 1
            if count % 200 == 0:
                print('%s files scanned' % count)

    return file_dict, dir_dict 
Example 9
Project: cos-python-sdk-v4   Author: tencentyun   File: cos_common.py    License: Apache License 2.0
def get_sha1_by_slice(file_name, slice_size):
        """ Get SHA array based on Qcloud Slice Upload Interface

        :param file_name: local file path
        :param slice_size: slice size in bytes
        :return: sha array like [{"offset": 0, "datalen": 1024, "datasha": "aaa"}, {}, {}]
        """
        from os import path

        with open(file_name, 'rb') as f:

            result = []
            file_size = path.getsize(file_name)
            sha1_obj = Sha1Hash()
            for current_offset in range(0, file_size, slice_size):

                data_length = min(slice_size, file_size - current_offset)
                sha1_obj.update(f.read(data_length))
                sha1_val = sha1_obj.inner_digest()
                result.append({"offset": current_offset, "datalen": data_length, "datasha": sha1_val})

            result[-1]['datasha'] = sha1_obj.hexdigest()
            return result 
Example 10
Project: p2ptv-pi   Author: alesnav   File: Storage.py    License: MIT License
def _close(self, file):
        f = self.handles[file]
        del self.handles[file]
        if self.whandles.has_key(file):
            del self.whandles[file]
            f.flush()
            self.unlock_file(file, f)
            f.close()
            if os.path.isfile(file):
                self.tops[file] = getsize(file)
                self.mtimes[file] = getmtime(file)
            else:
                if DEBUG:
                    log(self.log_prefix + '_close: missing file', file)
                self.tops[file] = 0
                self.mtimes[file] = 0
        else:
            if self.lock_while_reading:
                self.unlock_file(file, f)
            f.close() 
Example 11
Project: p2ptv-pi   Author: alesnav   File: Storage.py    License: MIT License
def enable_file(self, f):
        if self.config['encrypted_storage']:
            return
        if not self.disabled[f]:
            return
        self.disabled[f] = False
        r = self.file_ranges[f]
        if not r:
            return
        file = r[3]
        if not exists(file):
            h = open(file, 'wb+')
            h.flush()
            h.close()
        if not self.tops.has_key(file):
            self.tops[file] = getsize(file)
        if not self.mtimes.has_key(file):
            self.mtimes[file] = getmtime(file)
        self.working_ranges[f] = [r]
        if DEBUG:
            log(self.log_prefix + 'enable_file: f:', f, 'self.working_ranges:', self.working_ranges) 
Example 12
Project: p2ptv-pi   Author: alesnav   File: Storage.py    License: MIT License
def disable_file(self, f):
        if self.config['encrypted_storage']:
            return
        if self.disabled[f]:
            return
        self.disabled[f] = True
        r = self._get_disabled_ranges(f)
        if not r:
            return
        for file, begin, end in r[2]:
            if not os.path.isdir(self.bufferdir):
                os.makedirs(self.bufferdir)
            if not exists(file):
                h = open(file, 'wb+')
                h.flush()
                h.close()
            if not self.tops.has_key(file):
                self.tops[file] = getsize(file)
            if not self.mtimes.has_key(file):
                self.mtimes[file] = getmtime(file)

        self.working_ranges[f] = r[0] 
Example 13
Project: p2ptv-pi   Author: alesnav   File: Storage.py    License: MIT License
def pickle(self):
        files = []
        pfiles = []
        for i in xrange(len(self.files)):
            if not self.files[i][1]:
                continue
            if self.disabled[i]:
                for file, start, end in self._get_disabled_ranges(i)[2]:
                    pfiles.extend([basename(file), getsize(file), getmtime(file)])

                continue
            file = self.files[i][0].encode('utf-8')
            files.extend([i, getsize(file), getmtime(file)])

        return {'files': files,
         'partial files': pfiles} 
Example 14
Project: PyGPS   Author: gregstarr   File: readRinexObs.py    License: GNU Affero General Public License v3.0
def rinexobs(obsfn,writeh5=None,maxtimes=None):
    stem,ext = splitext(expanduser(obsfn))
    if ext[-1].lower() == 'o': #raw text file
        with open(obsfn,'r') as f:
            t=time.time()
            lines = f.read().splitlines(True)
            lines.append('')
            header,version,headlines,obstimes,sats,svset = scan(lines)
            print('{} is a RINEX {} file, {} kB.'.format(obsfn,version,getsize(obsfn)/1000.0))
            data = processBlocks(lines,header,obstimes,svset,headlines,sats)
            print("finished in {0:.2f} seconds".format(time.time()-t))
    #%% save to disk (optional)
        if writeh5:
            h5fn = stem + '.h5'
            print('saving OBS data to {}'.format(h5fn))
            data.to_hdf(h5fn,key='OBS',mode='a',complevel=6,append=False)
    elif ext.lower() == '.h5':
        data = read_hdf(obsfn,key='OBS')
        print('loaded OBS data from {}'.format(obsfn))
    return data


# this will scan the document for the header info and for the line on
# which each block starts 
Example 15
Project: xbmc.service.pushbullet   Author: elbowz   File: limpp.py    License: GNU General Public License v3.0
def __init__(self,addr=0,size=None,file=None,process=True):
        self.init()
        if not size:
            size = ospath_getsize(file)
        self.file_size = size
        self.file = file
        self.addr = addr
        self.size = size
        self.type = 'TGA'
        self.pallete = None
        self.Read_header()
        self.flipped = False
        if self.header['ImageDescriptor'] & 32:
            self.flipped = True
        self.alpha_bits = self.header['ImageDescriptor'] & 15
        self.Get_image_dimensions()
        self.size_of_plane = self.width * self.height
        self.sourceBpp = self.header['BitsPerPixel']/8
        self.data_size = self.width * self.height * self.sourceBpp
        self.Bps = self.width * self.sourceBpp
        self.Bpp = 4
        self.final_size = self.size_of_plane * self.Bpp
        self.RGBA = None
        if not process: return
        self.Process() 
Example 16
Project: xbmc.service.pushbullet   Author: elbowz   File: limpp.py    License: GNU General Public License v3.0
def __init__(self,addr=0,size=None,file=None,process=True,options=None):
        self.default_icon = None
        if options:
            if options.has_key('icon'):
                self.default_icon = options['icon']
        self.init()
        if not size:
            size = ospath_getsize(file)
        self.file_size = size
        self.file = file
        self.addr = addr
        self.size = size
        self.type = 'ICO'
        self.Read_header()
        self.final_size = self.width * self.height * 4
        if process: self.Process() 
Example 17
Project: xbmc.service.pushbullet   Author: elbowz   File: limpp.py    License: GNU General Public License v3.0
def __init__(self,addr=0,size=None,file=None,process=True):
        self.init()
        import TonyJpegDecoder
        if not size:
            size = ospath_getsize(file)
        self.file_size = size
        self.file = file
        self.addr = addr
        self.size = size
        self.type = 'JPG'
        self.header = {}
        self.decoder = TonyJpegDecoder.TonyJpegDecoder()
        self.data = self.decoder.DecompressImage(openfile(file,'rb').read())
        self.width = self.decoder.Width
        self.height = self.decoder.Height
        self.size_of_plane = self.width * self.height
        self.sourceBpp = 3
        self.size = size
        self.Bps = self.width * self.sourceBpp
        self.Bpp = 4
        self.final_size = self.size_of_plane * self.Bpp
        self.RGBA = None
        if not process: return
        self.Process_data() 
Example 18
Project: xbmc.service.pushbullet   Author: elbowz   File: limpp.py    License: GNU General Public License v3.0
def __init__(self,addr=0,size=None,file=None,process=True):
        self.init()
        if not size:
            size = ospath_getsize(file)
        self.file_size = size
        self.file = file
        self.addr = addr
        self.size = size
        self.type = 'XPM'
        self.Read_header()
        self.size_of_plane = self.width * self.height
        self.Bps = self.width * self.sourceBpp
        self.Bpp = 4
        self.final_size = self.size_of_plane * self.Bpp
        self.RGBA = None
        if not process: return
        self.Process() 
Example 19
Project: xbmc.service.pushbullet   Author: elbowz   File: limpp.py    License: GNU General Public License v3.0
def __init__(self,addr=0,size=None,file=None,process=True):
        self.init()
        if not size:
            size = ospath_getsize(file)
        self.file_size = size
        self.file = file
        self.addr = addr
        self.size = size
        self.type = 'XBM'
        self.Read_header()
        self.size_of_plane = self.width * self.height
        self.Bps = self.width * self.sourceBpp
        self.Bpp = 4
        self.final_size = self.size_of_plane * self.Bpp
        self.RGBA = None
        if not process: return
        self.Process()
        self.RGBA.mono = True 
Example 20
Project: downloader-cli   Author: deepjyoti30   File: download.py    License: MIT License
def _parse_exists(self):
        """This function should be called if the file already exists.

        In that case there are two possibilities, it's partially downloaded
        or it's a proper file.
        """
        if self.overwrite:
            return
        elif self.continue_download:
            cur_size = path.getsize(self.des)
            original_size = urllib.request.urlopen(self.URL).info()[
                'Content-Length']

            if original_size is None:
                print("WARNING: Could not perform sanity check on partial download.",
                    file=self.ostream)
                self._build_headers(cur_size)
            elif cur_size < int(original_size):
                self._build_headers(cur_size)
        else:
            print("ERROR: File exists. See 'dw --help' for solutions.", file=self.ostream)
            exit(-1) 
Example 21
Project: rupo   Author: IlyaGusev   File: tqdm_open.py    License: Apache License 2.0
def tqdm_open(filename, encoding='utf8'):
    """
    Open a file, wrapped in tqdm (progress bar)
    """
    total = getsize(filename)

    def wrapped_line_iterator(fd):
        with tqdm(total=total, unit="B", unit_scale=True, desc=basename(filename), miniters=1) as pb:
            processed_bytes = 0
            for line in fd:
                processed_bytes += len(line)
                if processed_bytes >= 1024 * 1024:
                    pb.update(processed_bytes)
                    processed_bytes = 0
                yield line
            pb.update(processed_bytes)

    with open(filename, encoding=encoding) as fd:
        yield wrapped_line_iterator(fd) 
Example 22
Project: pyprophet   Author: PyProphet   File: data_handling.py    License: BSD 3-Clause "New" or "Revised" License
def is_sqlite_file(filename):
    # https://stackoverflow.com/questions/12932607/how-to-check-with-python-and-sqlite3-if-one-sqlite-database-file-exists
    from os.path import isfile, getsize

    if not isfile(filename):
        return False
    if getsize(filename) < 100: # SQLite database file header is 100 bytes
        return False

    with open(filename, 'rb') as fd:
        header = fd.read(100)

    if 'SQLite format 3' in str(header):
        return True
    else:
        return False 
Example 23
Project: SecPi   Author: SecPi   File: alarmdata.py    License: GNU General Public License v3.0
def get_size(self, start_path):
		total_size = 0
		for dirpath, dirnames, filenames in walk(start_path):
			for f in filenames:
				fp = path.join(dirpath, f)
				total_size += path.getsize(fp)
				
		return total_size 
Example 24
Project: password_pwncheck   Author: CboeSecurity   File: pwned-password-server.py    License: MIT License
def __init__(self,filepath,debug=False):
        self.filepath = filepath
        self.fp = open(self.filepath,'r')

        first = self.fp.readline()
        self.linesize = len(first)
        self.nows_linesize = len(first.strip())
        self.filesize = getsize(self.filepath)
        self.filerows = self.filesize/self.linesize
        self.debug = debug 
Example 25
Project: password_pwncheck   Author: CboeSecurity   File: pwnpass.py    License: MIT License
def __init__(self,filepath,debug=False):
        self.filepath = filepath
        self.fp = open(self.filepath,'r')

        first = self.fp.readline()
        self.linesize = len(first)
        self.nows_linesize = len(first.strip())
        self.filesize = getsize(self.filepath)
        self.filerows = self.filesize/self.linesize
        self.debug = debug 
Example 26
Project: thingsboard-gateway   Author: thingsboard   File: tb_gateway_service.py    License: Apache License 2.0
def __load_persistent_devices(self):
        devices = {}
        if self.__connected_devices_file in listdir(self._config_dir) and \
                path.getsize(self._config_dir + self.__connected_devices_file) > 0:
            try:
                with open(self._config_dir + self.__connected_devices_file) as devices_file:
                    devices = load(devices_file)
            except Exception as e:
                log.exception(e)
        else:
            connected_devices_file = open(self._config_dir + self.__connected_devices_file, 'w')
            connected_devices_file.close()

        if devices is not None:
            log.debug("Loaded devices:\n %s", devices)
            for device_name in devices:
                try:
                    if self.available_connectors.get(devices[device_name]):
                        self.__connected_devices[device_name] = {
                            "connector": self.available_connectors[devices[device_name]]}
                    else:
                        log.info("Pair device %s - connector %s from persistent device storage - not found.", device_name, devices[device_name])
                except Exception as e:
                    log.exception(e)
                    continue
        else:
            log.debug("No device found in connected device file.")
            self.__connected_devices = {} if self.__connected_devices is None else self.__connected_devices 
Example 27
Project: dephell   Author: dephell   File: _shutil.py    License: MIT License
def get_path_size(path: Path) -> int:
    if not path.exists():
        return 0
    if path.is_file():
        return getsize(str(path))
    total = 0
    for subpath in path.glob('**/*'):
        if subpath.is_file():
            total += getsize(str(subpath))
    return total 
Example 28
Project: ibllib   Author: int-brain-lab   File: onelight.py    License: MIT License
def load_array(path):
    """Load a single file."""
    path = str(path)
    if path.endswith('.npy'):
        try:
            import numpy as np
            mmap_mode = 'r' if op.getsize(path) > 1e8 else None
            return np.load(path, mmap_mode=mmap_mode)
        except ImportError:
            logger.warning("NumPy is not available.")
            return
        except ValueError as e:
            logger.error("Impossible to read %s.", path)
            raise e
    elif path.endswith('.tsv'):
        try:
            import pandas as pd
            return pd.read_csv(path, sep='\t')
        except ImportError:
            logger.warning("Pandas is not available.")
        except ValueError as e:
            logger.error("Impossible to read %s.", path)
            raise e
    raise NotImplementedError(path)


# -------------------------------------------------------------------------------------------------
# File path parsing
# ------------------------------------------------------------------------------------------------- 
Example 29
Project: cassandra-dtest   Author: apache   File: sstablesplit_test.py    License: Apache License 2.0
def _do_split(self, node, version):
        logger.debug("Run sstablesplit")
        time.sleep(5.0)
        node.stop()

        # default split size is 50MB
        splitmaxsize = 10
        expected_sstable_size = (10 * 1024 * 1024)
        keyspace = 'keyspace1'

        # get the initial sstables and their total size
        origsstables = node.get_sstables(keyspace, '')
        origsstable_size = sum([getsize(sstable) for sstable in origsstables])
        logger.debug("Original sstable and sizes before split: {}".format([(name, getsize(name)) for name in origsstables]))

        # calculate the expected number of sstables post-split
        expected_num_sstables = floor(origsstable_size / expected_sstable_size)

        # split the sstables
        result = node.run_sstablesplit(keyspace=keyspace, size=splitmaxsize,
                                       no_snapshot=True, debug=True)

        for (out, error, rc) in result:
            logger.debug("stdout: {}".format(out))
            logger.debug("stderr: {}".format(error))
            logger.debug("rc: {}".format(rc))

        # get the sstables post-split and their total size
        sstables = node.get_sstables(keyspace, '')
        logger.debug("Number of sstables after split: %s. expected %s" % (len(sstables), expected_num_sstables))
        assert expected_num_sstables <= len(sstables) + 1
        assert 1 <= len(sstables)

        # make sure none of the tables are bigger than the max expected size
        sstable_sizes = [getsize(sstable) for sstable in sstables]
        # add a bit extra for overhead
        assert max(sstable_sizes) <= expected_sstable_size + 512
        # make sure node can start with changed sstables
        node.start(wait_for_binary_proto=True) 
Example 30
Project: apio   Author: FPGAwars   File: test_init.py    License: GNU General Public License v2.0
def validate_apio_ini(current_dir):
    path = join(current_dir, 'apio.ini')
    assert isfile(path) and getsize(path) > 0 
Example 31
Project: apio   Author: FPGAwars   File: test_init.py    License: GNU General Public License v2.0
def validate_scons(apioproject_dir):
    path = join(apioproject_dir, 'SConstruct')
    assert isfile(path) and getsize(path) > 0 
Example 32
Project: apio   Author: FPGAwars   File: test_complete.py    License: GNU General Public License v2.0
def validate_files_leds(apioproject_dir):
    path = join(apioproject_dir, 'leds.v')
    assert isfile(path) and getsize(path) > 0 
Example 33
Project: EDMarketConnector   Author: EDCD   File: dashboard.py    License: GNU General Public License v2.0
def on_modified(self, event):
        # watchdog callback - DirModifiedEvent on macOS, FileModifiedEvent on Windows
        if event.is_directory or (isfile(event.src_path) and getsize(event.src_path)):	# Can get on_modified events when the file is emptied
            self.process(event.src_path if not event.is_directory else None)

    # Can be called either in watchdog thread or, if polling, in main thread. 
Example 34
Project: SublimeFileBrowser   Author: aziz   File: dired_misc.py    License: MIT License
def get_info(self, path):
        self.preview_path = path
        self.parent = dirname(path)
        self.size = 0
        self.errors = []
        self._created, self._accessed, self._modified = get_dates(path)
        try:
            self.size += getsize(path)
        except OSError as e:
            self.errors.append(str(e))
        if not self.view.is_popup_visible():
            return
        sublime.set_timeout_async(self.update_preview, 1) 
Example 35
Project: crawl-dataset   Author: e-lab   File: getImgs.py    License: ISC License
def checkValid(savePath):
	si = getsize(savePath)
	print(si)
	if si == 2051:
		print('Not valid delete')
		call(['rm', '-rf', str(savePath)])

#Download img 
Example 36
Project: D-VAE   Author: muhanzhang   File: test_d3viz.py    License: MIT License
def check(self, f, reference=None, verbose=False):
        tmp_dir = tempfile.mkdtemp()
        html_file = pt.join(tmp_dir, 'index.html')
        if verbose:
            print(html_file)
        d3v.d3viz(f, html_file)
        assert pt.getsize(html_file) > 0
        if reference:
            assert filecmp.cmp(html_file, reference) 
Example 37
Project: exopy   Author: Exopy   File: api_builder.py    License: BSD 3-Clause "New" or "Revised" License
def shall_skip(module, opts):
    """Check if we want to skip this module."""
    # skip it if there is nothing (or just \n or \r\n) in the file
    if path.getsize(module) <= 2:
        return True
    # skip if it has a "private" name and this is selected
    filename = path.basename(module)
    if filename != '__init__.py' and filename.startswith('_') and \
       not opts.includeprivate:
        return True
    return False 
Example 38
Project: genielibs   Author: CiscoTestAutomation   File: ha.py    License: Apache License 2.0
def check_disk_space(cls, device, disk, image):
        ''' Check if the ISSU state is in the expected state

            Args:
                device (`obj`): Device Object.
                disk (`str`): Memory location to check for space upto threshold:
                              - harddisk
                              - stby-harddisk:
                image (`str`): ISSU upgrade image full path

            Returns:
                None

            Raises:
                Exception: - Cannot parse 'dir <>' output
                           - Not enough space on disk

            Example:
                >>> check_disk_space(device=uut, disk='harddisk', image=image)
        '''

        # Acceptable states for ISSU to be in
        assert disk in ['harddisk:', 'stby-harddisk:']

        try:
            output = device.execute('dir {}'.format(disk))
        except Exception as e:
            raise Exception("Unable to execute 'dir {}'".format(disk))

        # 78704144384 bytes total (59693568000 bytes free)
        m = re.search('(?P<total>(\d+)) +bytes +total +\((?P<free>(\d+)) '
                      '+bytes +free\)', output)
        bytes_total = m.groupdict()['total']
        bytes_free = m.groupdict()['free']

        if getsize(image) > int(bytes_free):
            raise Exception("Not enough space on '{}' to copy ISSU image".\
                            format(disk))
        else:
            logger.info("Enough space on '{}' to copy ISSU image".format(disk)) 
Example 39
Project: QCSuper   Author: P1sec   File: _utils.py    License: GNU General Public License v3.0
def __call__(self, path):
        
        path = expanduser(path)
        
        if path == '/dev/stdout' and 'a' in self.mode:
            
            self.mode = self.mode.replace('a', 'w')
        
        if path == '-':
            
            if 'r' in self.mode:
                file_obj = stdin.buffer if 'b' in self.mode else stdin
            else:
                file_obj = fdopen(dup(stdout.fileno()), 'wb' if 'b' in self.mode else 'w')
                dup2(stderr.fileno(), stdout.fileno())
            return file_obj
        
        elif path[-3:] != '.gz':
            
            file_obj = open(path, self.mode)
        
        else:
            
            file_obj = gzip.open(path, {'r': 'rt', 'a': 'at'}.get(self.mode, self.mode))
        
        file_obj.appending_to_file = bool(exists(path) and getsize(path))
        
        return file_obj 
Example 40
Project: DCRM   Author: 82Flex   File: statistics.py    License: GNU Affero General Public License v3.0
def getdirsize(dir):
    size = 0
    for root, dirs, files in os.walk(dir):
        size += sum([getsize(join(root, name)) for name in files])
    return size