Python os.path.getsize() Examples

The following are 30 code examples of os.path.getsize(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also want to check out all available functions and classes of the module os.path, or try the search function.
Example #1
Source File: alarmdata.py    From SecPi with GNU General Public License v3.0 7 votes vote down vote up
def list(self):
	"""Return the alarm-data directories below ``self.datapath``.

	Builds one entry per subdirectory (name, absolute path and a
	formatted modification time) and returns them sorted newest-name-first.

	Returns:
		dict: ``{'status': 'success', 'data': [entry, ...]}``.
	"""
	# TODO: error management (e.g. datapath missing or unreadable)
	entries = [
		{
			"name": entry,
			"path": full_path,
			"mtime": datetime.datetime.fromtimestamp(path.getmtime(full_path)).strftime('%d.%m.%Y %H:%M:%S')
			# "size": path.getsize(full_path),
			# "hsize": self.human_size(self.get_size(full_path))
		}
		for entry in listdir(self.datapath)
		for full_path in (path.join(self.datapath, entry),)
		if path.isdir(full_path)
	]
	entries.sort(key=lambda item: item['name'], reverse=True)
	return {'status': 'success', 'data': entries}
Example #2
Source File: limpp.py    From xbmc.service.pushbullet with GNU General Public License v3.0 6 votes vote down vote up
def __init__(self,addr=0,size=None,file=None,process=True):
        """Load and decode a JPEG image.

        Args:
            addr: Byte offset of the image data (kept for API parity with
                the sibling image classes; not used during decoding here).
            size: Image data size in bytes; defaults to the on-disk size.
            file: Path of the JPEG file to decode.
            process: When False, skip Process_data() after decoding.
        """
        self.init()
        import TonyJpegDecoder
        if not size:
            size = ospath_getsize(file)
        self.file_size = size
        self.file = file
        self.addr = addr
        # BUGFIX: `self.size` was assigned twice with the same value (once
        # here and once after sourceBpp); the redundant copy is removed.
        self.size = size
        self.type = 'JPG'
        self.header = {}
        self.decoder = TonyJpegDecoder.TonyJpegDecoder()
        # NOTE(review): `openfile` is presumably a project helper; the handle
        # returned here is never closed — TODO confirm / consider closing.
        self.data = self.decoder.DecompressImage(openfile(file,'rb').read())
        self.width = self.decoder.Width
        self.height = self.decoder.Height
        self.size_of_plane = self.width * self.height
        self.sourceBpp = 3                      # decoded source is RGB
        self.Bps = self.width * self.sourceBpp  # bytes per source row
        self.Bpp = 4                            # output bytes per pixel (RGBA)
        self.final_size = self.size_of_plane * self.Bpp
        self.RGBA = None
        if not process: return
        self.Process_data()
Example #3
Source File: downloader.py    From web2board with GNU Lesser General Public License v3.0 6 votes vote down vote up
def verify(self, sha1=None):
        """Verify the downloaded file's size and, optionally, its SHA-1.

        Raises:
            FDSizeMismatch: if the on-disk size differs from the expected one.
            FDSHASumMismatch: if the computed SHA-1 differs from `sha1`.
        """
        actual_size = getsize(self._destination)
        if actual_size != self.get_size():
            raise FDSizeMismatch(actual_size, self._fname, self.get_size())

        if not sha1:
            return

        checksum = None
        # Try `sha1sum` (GNU coreutils) first, then `shasum` (BSD/macOS).
        for cmd in (["sha1sum", self._destination],
                    ["shasum", "-a", "1", self._destination]):
            try:
                checksum = util.exec_command(cmd)['out']
                break
            except OSError:
                continue

        if checksum:
            # Tools prefix the digest with a backslash for escaped filenames.
            checksum = checksum[1:41] if checksum.startswith("\\") else checksum[:40]
            if sha1 != checksum:
                raise FDSHASumMismatch(checksum, self._fname, sha1)
Example #4
Source File: download.py    From downloader-cli with MIT License 6 votes vote down vote up
def _parse_exists(self):
        """Handle the case where the destination file already exists.

        Either the file is complete (overwrite it or abort) or it is a
        partial download that may be resumed via range headers.
        """
        if self.overwrite:
            return

        if not self.continue_download:
            print("ERROR: File exists. See 'dw --help' for solutions.", file=self.ostream)
            exit(-1)

        cur_size = path.getsize(self.des)
        original_size = urllib.request.urlopen(self.URL).info()[
            'Content-Length']

        if original_size is None:
            # Server did not report Content-Length; resume without checking.
            print("WARNING: Could not perform sanity check on partial download.",
                file=self.ostream)
            self._build_headers(cur_size)
        elif cur_size < int(original_size):
            # Partial file confirmed smaller than the remote: resume.
            self._build_headers(cur_size)
Example #5
Source File: tqdm_open.py    From rupo with Apache License 2.0 6 votes vote down vote up
def tqdm_open(filename, encoding='utf8'):
    """Open *filename* for reading, wrapped in a tqdm byte-progress bar."""
    file_size = getsize(filename)

    def iter_with_progress(handle):
        # Batch progress-bar updates to roughly one per megabyte read.
        with tqdm(total=file_size, unit="B", unit_scale=True, desc=basename(filename), miniters=1) as bar:
            pending = 0
            for line in handle:
                pending += len(line)
                if pending >= 1024 * 1024:
                    bar.update(pending)
                    pending = 0
                yield line
            bar.update(pending)

    with open(filename, encoding=encoding) as handle:
        yield iter_with_progress(handle)
Example #6
Source File: librispeech.py    From End-to-end-ASR-Pytorch with MIT License 6 votes vote down vote up
def __init__(self, path, split, tokenizer, bucket_size, ascending=False):
        """Index a LibriSpeech-style dataset split.

        Collects every ``*.flac`` file under ``path/<s>`` for each ``s`` in
        ``split``, reads and tokenizes the matching transcripts, then stores
        the (file, tokens) pairs sorted by transcript token length.

        Args:
            path: Dataset root directory.
            split: Iterable of split names (subdirectories of ``path``).
            tokenizer: Object providing ``encode(str) -> sequence``.
            bucket_size: Stored for use by the sampler/loader elsewhere.
            ascending: Sort ascending by transcript length when True
                (default sorts longest-first).
        """
        # Setup
        self.path = path
        self.bucket_size = bucket_size

        # List all wave files
        file_list = []
        for s in split:
            split_list = list(Path(join(path, s)).rglob("*.flac"))
            assert len(split_list) > 0, "No data found @ {}".format(join(path,s))
            file_list += split_list
        # Read text in parallel; READ_FILE_THREADS and read_text are
        # module-level names defined elsewhere in this file.
        text = Parallel(n_jobs=READ_FILE_THREADS)(
            delayed(read_text)(str(f)) for f in file_list)
        #text = Parallel(n_jobs=-1)(delayed(tokenizer.encode)(txt) for txt in text)
        text = [tokenizer.encode(txt) for txt in text]

        # Sort dataset by text length (token count), keeping files paired
        # with their transcripts.
        #file_len = Parallel(n_jobs=READ_FILE_THREADS)(delayed(getsize)(f) for f in file_list)
        self.file_list, self.text = zip(*[(f_name, txt)
                                          for f_name, txt in sorted(zip(file_list, text), reverse=not ascending, key=lambda x:len(x[1]))])
Example #7
Source File: data_handling.py    From pyprophet with BSD 3-Clause "New" or "Revised" License 6 votes vote down vote up
def is_sqlite_file(filename):
    """Return True if *filename* looks like an SQLite 3 database file.

    A valid SQLite database begins with the 16-byte magic header
    ``b'SQLite format 3\\x00'`` and is at least 100 bytes long (the size
    of the database file header).
    """
    # https://stackoverflow.com/questions/12932607/how-to-check-with-python-and-sqlite3-if-one-sqlite-database-file-exists
    from os.path import isfile, getsize

    if not isfile(filename):
        return False
    if getsize(filename) < 100: # SQLite database file header is 100 bytes
        return False

    with open(filename, 'rb') as fd:
        header = fd.read(100)

    # BUGFIX: compare bytes directly instead of `'SQLite format 3' in
    # str(header)`, which relied on the repr() of the bytes object and
    # would match the magic string anywhere inside the header.
    return header[:16] == b'SQLite format 3\x00'
Example #8
Source File: limpp.py    From xbmc.service.pushbullet with GNU General Public License v3.0 6 votes vote down vote up
def __init__(self,addr=0,size=None,file=None,process=True):
        """Load an XBM image.

        Args:
            addr: Byte offset of the image data within the file (default 0).
            size: Image data size in bytes; defaults to the on-disk size.
            file: Path of the XBM file.
            process: When False, stop after header parsing and setup.
        """
        self.init()
        if not size:
            # Fall back to the file's on-disk size when none is given.
            size = ospath_getsize(file)
        self.file_size = size
        self.file = file
        self.addr = addr
        self.size = size
        self.type = 'XBM'
        # Read_header() presumably sets self.width/height/sourceBpp
        # (defined elsewhere in this class) — TODO confirm.
        self.Read_header()
        self.size_of_plane = self.width * self.height
        self.Bps = self.width * self.sourceBpp   # bytes per source row
        self.Bpp = 4                             # output bytes per pixel (RGBA)
        self.final_size = self.size_of_plane * self.Bpp
        self.RGBA = None
        if not process: return
        self.Process()
        self.RGBA.mono = True
Example #9
Source File: limpp.py    From xbmc.service.pushbullet with GNU General Public License v3.0 6 votes vote down vote up
def __init__(self,addr=0,size=None,file=None,process=True):
        """Load an XPM image.

        Args:
            addr: Byte offset of the image data within the file (default 0).
            size: Image data size in bytes; defaults to the on-disk size.
            file: Path of the XPM file.
            process: When False, stop after header parsing and setup.
        """
        self.init()
        if not size:
            # Fall back to the file's on-disk size when none is given.
            size = ospath_getsize(file)
        self.file_size = size
        self.file = file
        self.addr = addr
        self.size = size
        self.type = 'XPM'
        # Read_header() presumably sets self.width/height/sourceBpp
        # (defined elsewhere in this class) — TODO confirm.
        self.Read_header()
        self.size_of_plane = self.width * self.height
        self.Bps = self.width * self.sourceBpp   # bytes per source row
        self.Bpp = 4                             # output bytes per pixel (RGBA)
        self.final_size = self.size_of_plane * self.Bpp
        self.RGBA = None
        if not process: return
        self.Process()
Example #10
Source File: Storage.py    From p2ptv-pi with MIT License 6 votes vote down vote up
def enable_file(self, f):
        """Re-enable the previously disabled file slot `f`.

        Creates the backing file if missing and records its current
        size/mtime for later consistency checks. No-op for encrypted
        storage or files that are not disabled.
        """
        if self.config['encrypted_storage']:
            return
        if not self.disabled[f]:
            return
        self.disabled[f] = False
        r = self.file_ranges[f]
        if not r:
            return
        file = r[3]
        if not exists(file):
            # Create an empty placeholder so size/mtime can be recorded.
            h = open(file, 'wb+')
            h.flush()
            h.close()
        # `in` replaces dict.has_key() (removed in Python 3; works in both).
        if file not in self.tops:
            self.tops[file] = getsize(file)
        if file not in self.mtimes:
            self.mtimes[file] = getmtime(file)
        self.working_ranges[f] = [r]
        if DEBUG:
            log(self.log_prefix + 'enable_file: f:', f, 'self.working_ranges:', self.working_ranges)
Example #11
Source File: _utils.py    From QCSuper with GNU General Public License v3.0 6 votes vote down vote up
def __call__(self, path):
        """Open *path* for reading or writing according to ``self.mode``.

        Special cases:
          * ``'-'`` maps to stdin (read modes) or a duplicate of stdout
            (write modes); stdout is then redirected onto stderr so later
            prints cannot corrupt the data stream.
          * Paths ending in ``.gz`` are opened transparently via gzip
            (text-mode variants of 'r'/'a').
          * ``/dev/stdout`` in append mode is downgraded to write mode.

        The returned file object carries an ``appending_to_file`` attribute:
        True when the target already exists and is non-empty.
        """
        path = expanduser(path)
        
        if path == '/dev/stdout' and 'a' in self.mode:
            # Appending to /dev/stdout makes no sense; treat as plain write.
            self.mode = self.mode.replace('a', 'w')
        
        if path == '-':
            
            if 'r' in self.mode:
                file_obj = stdin.buffer if 'b' in self.mode else stdin
            else:
                # Duplicate stdout for data output, then point the real
                # stdout at stderr so diagnostics don't mix into the data.
                file_obj = fdopen(dup(stdout.fileno()), 'wb' if 'b' in self.mode else 'w')
                dup2(stderr.fileno(), stdout.fileno())
            return file_obj
        
        elif path[-3:] != '.gz':
            
            file_obj = open(path, self.mode)
        
        else:
            # Map binary-ish modes to gzip text modes where applicable.
            file_obj = gzip.open(path, {'r': 'rt', 'a': 'at'}.get(self.mode, self.mode))
        
        file_obj.appending_to_file = bool(exists(path) and getsize(path))
        
        return file_obj
Example #12
Source File: limpp.py    From xbmc.service.pushbullet with GNU General Public License v3.0 6 votes vote down vote up
def __init__(self,addr=0,size=None,file=None,process=True,options=None):
        """Load an ICO image.

        Args:
            addr: Byte offset of the image data within the file (default 0).
            size: Image data size in bytes; defaults to the on-disk size.
            file: Path of the ICO file.
            process: When False, stop after header parsing.
            options: Optional dict; an 'icon' entry selects the default icon.
        """
        self.default_icon = None
        if options:
            # `in` replaces dict.has_key() (removed in Python 3).
            if 'icon' in options:
                self.default_icon = options['icon']
        self.init()
        if not size:
            size = ospath_getsize(file)
        self.file_size = size
        self.file = file
        self.addr = addr
        self.size = size
        self.type = 'ICO'
        self.Read_header()
        # 4 output bytes per pixel (RGBA).
        self.final_size = self.width * self.height * 4
        if process: self.Process()
Example #13
Source File: fortran_binary.py    From seisflows with BSD 2-Clause "Simplified" License 6 votes vote down vote up
def _read(filename):
    """ Reads Fortran style binary data into numpy array
    """
    nbytes = getsize(filename)
    with open(filename, 'rb') as file:
        # read size of record
        file.seek(0)
        n = np.fromfile(file, dtype='int32', count=1)[0]
        if n == nbytes-8:
            file.seek(4)
            data = np.fromfile(file, dtype='float32')
            return data[:-1]
        else:
            file.seek(0)
            data = np.fromfile(file, dtype='float32')
            return data 
Example #14
Source File: limpp.py    From xbmc.service.pushbullet with GNU General Public License v3.0 6 votes vote down vote up
def __init__(self,addr=0,size=None,file=None,process=True):
        """Load a TGA image.

        Args:
            addr: Byte offset of the image data within the file (default 0).
            size: Image data size in bytes; defaults to the on-disk size.
            file: Path of the TGA file.
            process: When False, stop after header parsing and setup.
        """
        self.init()
        if not size:
            size = ospath_getsize(file)
        self.file_size = size
        self.file = file
        self.addr = addr
        self.size = size
        self.type = 'TGA'
        self.pallete = None
        self.Read_header()
        self.flipped = False
        # Bit 5 (value 32) of ImageDescriptor — presumably the vertical
        # orientation flag per the TGA spec; TODO confirm.
        if self.header['ImageDescriptor'] & 32:
            self.flipped = True
        # Low 4 bits of ImageDescriptor: attribute (alpha) bit count.
        self.alpha_bits = self.header['ImageDescriptor'] & 15
        self.Get_image_dimensions()
        self.size_of_plane = self.width * self.height
        # NOTE(review): '/' is integer division only under Python 2; on
        # Python 3 this makes sourceBpp a float — confirm target interpreter.
        self.sourceBpp = self.header['BitsPerPixel']/8
        self.data_size = self.width * self.height * self.sourceBpp
        self.Bps = self.width * self.sourceBpp   # bytes per source row
        self.Bpp = 4                             # output bytes per pixel (RGBA)
        self.final_size = self.size_of_plane * self.Bpp
        self.RGBA = None
        if not process: return
        self.Process()
Example #15
Source File: dynamodb_to_s3.py    From airflow with Apache License 2.0 6 votes vote down vote up
def _scan_dynamodb_and_upload_to_s3(self, temp_file, scan_kwargs, table):
        """Scan `table` page by page, spooling serialized items to `temp_file`.

        Each item is serialized via ``self.process_func`` and written out.
        Whenever the spool file reaches ``self.file_size`` bytes it is
        uploaded to S3 and replaced by a fresh NamedTemporaryFile.

        Returns:
            The (possibly replaced) temp file holding items not yet uploaded.
        """
        while True:
            response = table.scan(**scan_kwargs)
            items = response['Items']
            for item in items:
                temp_file.write(self.process_func(item))

            if 'LastEvaluatedKey' not in response:
                # no more items to scan
                break

            # Resume the next scan page from where this one stopped.
            last_evaluated_key = response['LastEvaluatedKey']
            scan_kwargs['ExclusiveStartKey'] = last_evaluated_key

            # Upload the file to S3 if reach file size limit
            if getsize(temp_file.name) >= self.file_size:
                _upload_file_to_s3(temp_file, self.s3_bucket_name,
                                   self.s3_key_prefix)
                temp_file.close()
                temp_file = NamedTemporaryFile()
        return temp_file
Example #16
Source File: test_protein_seqprop.py    From ssbio with MIT License 6 votes vote down vote up
def test_write_gff_file(self, seqprop_with_i, tmpdir):
        """Test writing the features, and that features are now loaded from a file"""
        outpath = tmpdir.join('test_seqprop_with_i_write_gff_file.gff').strpath
        seqprop_with_i.write_gff_file(outfile=outpath, force_rerun=True)

        # Test that the file was written (exists and is non-empty)
        assert op.exists(outpath)
        assert op.getsize(outpath) > 0

        # Test that file paths are correct
        assert seqprop_with_i.feature_path == outpath
        assert seqprop_with_i.feature_file == 'test_seqprop_with_i_write_gff_file.gff'
        assert seqprop_with_i.feature_dir == tmpdir

        # Once features are file-backed, direct assignment must be rejected
        with pytest.raises(ValueError):
            seqprop_with_i.features = ['NOFEATURES']
Example #17
Source File: dir_compare.py    From hacker-scripts with MIT License 6 votes vote down vote up
def traverse_dir(path):
    """Walk *path* recursively, printing every entry encountered.

    Returns:
        tuple: ``(file_dict, dir_dict)`` where ``file_dict`` maps absolute
        file paths to their byte sizes and ``dir_dict`` maps directory
        paths to 0.
    """
    file_dict = {}
    dir_dict = {}
    count = 1
    for root, subdirs, filenames in walk(path):
        # Handle directories first, then files, exactly once per level.
        for is_file, names in ((False, subdirs), (True, filenames)):
            for name in names:
                abs_path = join(root, name)
                if is_file:
                    file_dict[abs_path] = getsize(abs_path)
                else:
                    dir_dict[abs_path] = 0
                print(abs_path)
                count += 1
                if count % 200 == 0:
                    print('%s files scanned' % count)

    return file_dict, dir_dict
Example #18
Source File: readRinexObs.py    From PyGPS with GNU Affero General Public License v3.0 6 votes vote down vote up
def rinexobs(obsfn,writeh5=None,maxtimes=None):
    """Read RINEX observation data from a raw '*o' file or an '.h5' cache.

    Args:
        obsfn: Path to a raw RINEX obs file (extension ending in 'o')
            or an HDF5 cache ('.h5').
        writeh5: When truthy and a raw file was parsed, save the result
            to '<stem>.h5'.
        maxtimes: Unused in this function — kept for API compatibility.

    Returns:
        The parsed observation data (object from processBlocks/read_hdf).
    """
    stem,ext = splitext(expanduser(obsfn))
    if ext[-1].lower() == 'o': #raw text file
        with open(obsfn,'r') as f:
            t=time.time()
            lines = f.read().splitlines(True)
            lines.append('')
            header,version,headlines,obstimes,sats,svset = scan(lines)
            print('{} is a RINEX {} file, {} kB.'.format(obsfn,version,getsize(obsfn)/1000.0))
            data = processBlocks(lines,header,obstimes,svset,headlines,sats)
            print("finished in {0:.2f} seconds".format(time.time()-t))
    #%% save to disk (optional)
        if writeh5:
            h5fn = stem + '.h5'
            print('saving OBS data to {}'.format(h5fn))
            data.to_hdf(h5fn,key='OBS',mode='a',complevel=6,append=False)
    elif ext.lower() == '.h5':
        data = read_hdf(obsfn,key='OBS')
        # BUGFIX: this branch referenced the undefined name `blocks`
        # (NameError); report the loaded data's own range instead.
        print('loaded OBS data from {} to {}'.format(data.items[0],data.items[-1]))
    return data


# this will scan the document for the header info and for the line on
# which each block starts 
Example #19
Source File: Storage.py    From p2ptv-pi with MIT License 6 votes vote down vote up
def pickle(self):
        """Snapshot per-file metadata for persisting resume state.

        Returns:
            dict with two flat lists:
              * 'files': [index, size, mtime, ...] for each enabled file;
              * 'partial files': [basename, size, mtime, ...] for the
                on-disk pieces backing disabled files.
        """
        files = []
        pfiles = []
        for i in xrange(len(self.files)):
            # Skip entries whose second field is falsy — presumably
            # zero-length placeholders; TODO confirm.
            if not self.files[i][1]:
                continue
            if self.disabled[i]:
                for file, start, end in self._get_disabled_ranges(i)[2]:
                    pfiles.extend([basename(file), getsize(file), getmtime(file)])

                continue
            file = self.files[i][0].encode('utf-8')
            files.extend([i, getsize(file), getmtime(file)])

        return {'files': files,
         'partial files': pfiles}
Example #20
Source File: Storage.py    From p2ptv-pi with MIT License 6 votes vote down vote up
def disable_file(self, f):
        """Disable the file slot `f`, moving it to its buffered ranges.

        Ensures the buffer directory and backing files exist, records their
        size/mtime for later consistency checks, and switches the slot's
        working ranges to the disabled set. No-op for encrypted storage or
        already-disabled files.
        """
        if self.config['encrypted_storage']:
            return
        if self.disabled[f]:
            return
        self.disabled[f] = True
        r = self._get_disabled_ranges(f)
        if not r:
            return
        for file, begin, end in r[2]:
            if not os.path.isdir(self.bufferdir):
                os.makedirs(self.bufferdir)
            if not exists(file):
                # Create an empty placeholder so size/mtime can be recorded.
                h = open(file, 'wb+')
                h.flush()
                h.close()
            # `in` replaces dict.has_key() (removed in Python 3; works in both).
            if file not in self.tops:
                self.tops[file] = getsize(file)
            if file not in self.mtimes:
                self.mtimes[file] = getmtime(file)

        self.working_ranges[f] = r[0]
Example #21
Source File: gutter_color.py    From GutterColor with MIT License 6 votes vote down vote up
def clear_cache(force = False):
  """Delete the GutterColor icon cache when it exceeds 5 MB (or always when
  `force` is True), then ensure the cache directory exists."""
  from os.path import getsize, join, isfile, exists
  from os import makedirs, listdir
  from sublime import cache_path
  from shutil import rmtree

  # The icon cache path
  icon_path = join(cache_path(), "GutterColor")

  # Maximum amount of space the cache may occupy
  limit = 5242880 # 5 MB

  if exists(icon_path):
    # Sum the sizes of regular files only; subdirectories are ignored.
    cache_bytes = 0
    for entry in listdir(icon_path):
      full = join(icon_path, entry)
      if isfile(full):
        cache_bytes += getsize(full)
    if force or cache_bytes > limit:
      rmtree(icon_path)

  if not exists(icon_path): makedirs(icon_path)
Example #22
Source File: cos_common.py    From cos-python-sdk-v4 with Apache License 2.0 6 votes vote down vote up
def get_sha1_by_slice(file_name, slice_size):
        """Compute per-slice SHA-1 digests for the Qcloud slice-upload API.

        Args:
            file_name: Local file path.
            slice_size: Slice size in bytes.

        Returns:
            A list of dicts like
            ``{"offset": 0, "datalen": 1024, "datasha": "..."}`` where each
            "datasha" is the running digest up to that slice's end; the last
            entry carries the full-file hex digest.
        """
        from os import path

        with open(file_name, 'rb') as stream:
            file_size = path.getsize(file_name)
            hasher = Sha1Hash()
            slices = []
            for offset in range(0, file_size, slice_size):
                length = min(slice_size, file_size - offset)
                hasher.update(stream.read(length))
                slices.append({
                    "offset": offset,
                    "datalen": length,
                    "datasha": hasher.inner_digest(),
                })

            # The final slice reports the digest of the whole file.
            slices[-1]['datasha'] = hasher.hexdigest()
            return slices
Example #23
Source File: Storage.py    From p2ptv-pi with MIT License 6 votes vote down vote up
def _close(self, file):
        """Flush, unlock and close the cached handle for `file`.

        Write handles are flushed and their size/mtime recorded (zeroed if
        the file has vanished); read handles are only unlocked when
        `lock_while_reading` is set, then closed.
        """
        f = self.handles[file]
        del self.handles[file]
        # `in` replaces dict.has_key() (removed in Python 3; works in both).
        if file in self.whandles:
            del self.whandles[file]
            f.flush()
            self.unlock_file(file, f)
            f.close()
            if os.path.isfile(file):
                self.tops[file] = getsize(file)
                self.mtimes[file] = getmtime(file)
            else:
                if DEBUG:
                    log(self.log_prefix + '_close: missing file', file)
                self.tops[file] = 0
                self.mtimes[file] = 0
        else:
            if self.lock_while_reading:
                self.unlock_file(file, f)
            f.close()
Example #24
Source File: cos_client.py    From coscmd with MIT License 5 votes vote down vote up
def upload_file(self, local_path, cos_path, _http_headers='{}', **kwargs):
        """Upload a local file to COS, choosing single or multipart mode.

        Files within one part (plus 1 KB slack) or below the multiupload
        threshold use single_upload; larger files use multipart_upload.
        """
        size_bytes = path.getsize(local_path)
        part_limit = self._conf._part_size * 1024 * 1024 + 1024
        if size_bytes <= part_limit or size_bytes <= self._multiupload_threshold:
            return self.single_upload(local_path, cos_path, _http_headers, **kwargs)
        return self.multipart_upload(local_path, cos_path, _http_headers, **kwargs)
Example #25
Source File: alarmdata.py    From SecPi with GNU General Public License v3.0 5 votes vote down vote up
def get_size(self, start_path):
	"""Return the total size in bytes of all files under *start_path*."""
	return sum(
		path.getsize(path.join(dirpath, name))
		for dirpath, _dirnames, filenames in walk(start_path)
		for name in filenames
	)
Example #26
Source File: test_samples.py    From svglib with GNU Lesser General Public License v3.0 5 votes vote down vote up
def test_convert_pdf_uniconv(self):
        "Test converting W3C SVG files to PDF using uniconverter."

        paths = glob.glob("%s/svg/*" % self.folder_path)
        paths = [p for p in paths if splitext(p.lower())[1] in [".svg", ".svgz"]]
        for path in paths:
            out = splitext(path)[0] + '-uniconv.pdf'
            cmd = "uniconv '%s' '%s'" % (path, out)
            os.popen(cmd).read()
            if exists(out) and getsize(out) == 0:
                os.remove(out) 
Example #27
Source File: torrent.py    From torrentool with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def _get_target_files_info(cls, src_path: Path) -> Tuple[List[Tuple[str, int, List[str]]], int]:
        is_dir = src_path.is_dir()

        src_path = f'{src_path}'  # Force walk() to return unicode names.
        target_files = []

        if is_dir:
            for base, _, files in walk(src_path):
                target_files.extend([join(base, fname) for fname in sorted(files)])

        else:
            target_files.append(src_path)

        target_files_ = []
        total_size = 0

        for fpath in target_files:
            file_size = getsize(fpath)

            if not file_size:
                continue

            target_files_.append((fpath, file_size, normpath(fpath.replace(src_path, '')).strip(sep).split(sep)))
            total_size += file_size

        return target_files_, total_size 
Example #28
Source File: test_speed.py    From textX with MIT License 5 votes vote down vote up
def timeit(file_name, message, **kwargs):
    """Parse `file_name` with the rhapsody.tx metamodel and report speed.

    Note: this function shadows the stdlib ``timeit`` module name.

    Args:
        file_name: Name of a file inside the local 'test_inputs' directory.
        message: Label printed before the measurement.
        **kwargs: Forwarded to metamodel_from_file.
    """
    print(message, 'File:', file_name)
    file_name = join(dirname(__file__), 'test_inputs', file_name)
    file_size = getsize(file_name)
    print('File size: {:.2f}'.format(file_size/1000), 'KB')

    mm = metamodel_from_file('rhapsody.tx', **kwargs)

    # Only the model parse is timed; metamodel construction is excluded.
    t_start = time.time()
    mm.model_from_file(file_name)
    t_end = time.time()

    print('Elapsed time: {:.2f}'.format(t_end - t_start), 'sec')
    print('Speed = {:.2f}'.format(file_size/1000/(t_end - t_start)), 'KB/sec\n')
Example #29
Source File: watch.py    From logscan with Apache License 2.0 5 votes vote down vote up
def __init__(self, filename, counter):
        """Set up watching of *filename*.

        Creates the event queue, checker chain and watchdog observer. If
        the file already exists it is opened and the offset is set to its
        current size — presumably so only newly appended data gets
        processed; confirm against the reader code.
        """
        self.filename = path.abspath(filename)
        self.queue = Queue()
        self.check_chain = CheckerChain(self.queue, counter)
        self.observer = Observer()
        self.fd = None      # open handle once the file exists
        self.offset = 0     # bytes already accounted for
        if path.isfile(self.filename):
            self.fd = open(self.filename)
            self.offset = path.getsize(self.filename)
Example #30
Source File: watch.py    From logscan with Apache License 2.0 5 votes vote down vote up
def on_moved(self, event):
        """Watchdog hook: react to the watched file being moved/renamed.

        If the watched path was the move's source, close the now-stale
        handle; if something was moved into the watched path, reopen it and
        set the offset to its current size so only later appends are read.
        """
        if path.abspath(event.src_path) == self.filename:
            self.fd.close()
        if path.abspath(event.dest_path) == self.filename and path.isfile(self.filename):
            self.fd = open(self.filename)
            self.offset = path.getsize(self.filename)