Python itertools.islice() Examples

The following are 30 code examples showing how to use itertools.islice(). They are extracted from open source projects; the project, author, file, and license are listed above each example so you can locate the original source.

You may also want to check out all available functions/classes of the module itertools.
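As a quick refresher before the project code: islice(iterable, stop) takes the first stop items, and islice(iterable, start, stop[, step]) slices lazily without supporting negative indices. A minimal sketch:

from itertools import islice

numbers = range(20)
print(list(islice(numbers, 5)))         # first five items: [0, 1, 2, 3, 4]
print(list(islice(numbers, 5, 10)))     # items 5 through 9: [5, 6, 7, 8, 9]
print(list(islice(numbers, 0, 10, 2)))  # every second item below 10: [0, 2, 4, 6, 8]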

Example 1
Project: L3C-PyTorch   Author: fab-jul   File: pytorch_ext.py    License: GNU General Public License v3.0
def assert_equal(t1, t2, show_num_wrong=3, names=None, msg=''):
    if t1.shape != t2.shape:
        raise AssertionError('Different shapes! {} != {}'.format(t1.shape, t2.shape))
    wrong = t1 != t2
    if not wrong.any():
        return
    if names is None:
        names = ('t1', 't2')
    wrong_idxs = wrong.nonzero()
    num_wrong = len(wrong_idxs)
    show_num_wrong = min(show_num_wrong, num_wrong)
    wrong_idxs = itertools.islice((tuple(i.tolist()) for i in wrong_idxs),
                                  show_num_wrong)
    err_msg = ' // '.join('{}: {}!={}'.format(idx, t1[idx], t2[idx])
                          for idx in wrong_idxs)
    raise AssertionError(('{} != {}: {}, and {}/{} other(s) '.format(
            names[0], names[1], err_msg, num_wrong - show_num_wrong, np.prod(t1.shape)) + msg).strip()) 
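The islice call above is what keeps the error message bounded: it pulls at most show_num_wrong indices from the generator of mismatching positions instead of materializing all of them. A minimal standalone sketch of the same pattern using plain lists (first_mismatches is an illustrative helper, not part of the project):

import itertools

def first_mismatches(a, b, show_num_wrong=3):
    # Lazily enumerate mismatching indices; islice stops after show_num_wrong of them.
    wrong = (i for i, (x, y) in enumerate(zip(a, b)) if x != y)
    return list(itertools.islice(wrong, show_num_wrong))

print(first_mismatches([1, 2, 3, 4, 5], [1, 0, 3, 0, 0]))  # [1, 3, 4]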
Example 2
Project: jawfish   Author: war-and-code   File: heapq.py    License: MIT License
def nlargest(n, iterable):
    """Find the n largest elements in a dataset.

    Equivalent to:  sorted(iterable, reverse=True)[:n]
    """
    if n < 0:
        return []
    it = iter(iterable)
    result = list(islice(it, n))
    if not result:
        return result
    heapify(result)
    _heappushpop = heappushpop
    for elem in it:
        _heappushpop(result, elem)
    result.sort(reverse=True)
    return result 
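This mirrors the standard library's heapq.nlargest: islice seeds the heap with the first n items, and the remaining items are streamed through heappushpop. A quick usage check with the stdlib function:

import heapq

print(heapq.nlargest(3, [1, 8, 2, 23, 7, -4, 18, 42, 37]))  # [42, 37, 23]
print(heapq.nlargest(3, []))                                # []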
Example 3
Project: jawfish   Author: war-and-code   File: heapq.py    License: MIT License
def nsmallest(n, iterable):
    """Find the n smallest elements in a dataset.

    Equivalent to:  sorted(iterable)[:n]
    """
    if n < 0:
        return []
    it = iter(iterable)
    result = list(islice(it, n))
    if not result:
        return result
    _heapify_max(result)
    _heappushpop = _heappushpop_max
    for elem in it:
        _heappushpop(result, elem)
    result.sort()
    return result

# 'heap' is a heap at all indices >= startpos, except possibly for pos.  pos
# is the index of a leaf with a possibly out-of-order value.  Restore the
# heap invariant. 
Example 4
Project: razzy-spinner   Author: rafasashi   File: conll2000.py    License: GNU General Public License v3.0
def demo():
    from en.parser.nltk_lite.corpora import conll2000
    from itertools import islice

    print "CONLL Chunked data\n"
    
    print "Raw text:"
    for sent in islice(conll2000.raw(), 0, 5):
        print sent
    print

    print "Tagged text:"
    for sent in islice(conll2000.tagged(), 0, 5):
        print sent
    print

    print "Chunked text:"
    for tree in islice(conll2000.chunked(chunk_types=('NP', 'PP', 'VP')), 0, 5):
        print tree.pp()
    print 
Example 5
Project: razzy-spinner   Author: rafasashi   File: toolbox.py    License: GNU General Public License v3.0
def demo():
    from en.parser.nltk_lite.corpora import toolbox
    from itertools import islice
    from pprint import pprint

    print 'Raw:'
    pprint(list(islice(toolbox.raw(), 3)))

    print 'Dictionary:'
    pprint(list(islice(toolbox.dictionary(), 3)))

    print 'Dictionary-List:'
    pprint(list(islice(toolbox.dict_list(), 3)))

    print 'Complex test cases, no header'
    pprint(list(toolbox.raw("test.dic")))

    print 'Complex test cases, no header, dictionary'
    pprint(list(toolbox.dictionary("test.dic")))

    print 'Complex test cases, no header, dictionary list'
    pprint(list(toolbox.dict_list("test.dic")))

    print 'Complex test cases, with header'
    pprint(list(toolbox.raw("test.dic", include_header=True))) 
Example 6
Project: misp42splunk   Author: remg427   File: nativetypes.py    License: GNU Lesser General Public License v3.0
def native_concat(nodes):
    """Return a native Python type from the list of compiled nodes. If the
    result is a single node, its value is returned. Otherwise, the nodes are
    concatenated as strings. If the result can be parsed with
    :func:`ast.literal_eval`, the parsed value is returned. Otherwise, the
    string is returned.
    """
    head = list(islice(nodes, 2))

    if not head:
        return None

    if len(head) == 1:
        out = head[0]
    else:
        out = u''.join([text_type(v) for v in chain(head, nodes)])

    try:
        return literal_eval(out)
    except (ValueError, SyntaxError, MemoryError):
        return out 
Example 7
Project: misp42splunk   Author: remg427   File: nativetypes.py    License: GNU Lesser General Public License v3.0
def native_concat(nodes):
    """Return a native Python type from the list of compiled nodes. If the
    result is a single node, its value is returned. Otherwise, the nodes are
    concatenated as strings. If the result can be parsed with
    :func:`ast.literal_eval`, the parsed value is returned. Otherwise, the
    string is returned.
    """
    head = list(islice(nodes, 2))

    if not head:
        return None

    if len(head) == 1:
        out = head[0]
    else:
        out = u''.join([text_type(v) for v in chain(head, nodes)])

    try:
        return literal_eval(out)
    except (ValueError, SyntaxError, MemoryError):
        return out 
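The notable islice usage in native_concat is the two-item peek: islice(nodes, 2) consumes at most two elements, which is enough to distinguish an empty iterator and a single node from the general case, and chain(head, nodes) then re-attaches the peeked items to the rest. A minimal sketch of that peek-then-continue pattern (peek_or_join is an illustrative stand-in, not the Jinja2 code):

from itertools import islice, chain

def peek_or_join(items):
    it = iter(items)
    head = list(islice(it, 2))   # consume at most two items
    if not head:
        return None              # empty input
    if len(head) == 1:
        return head[0]           # single item: return it unchanged
    return ''.join(str(v) for v in chain(head, it))  # otherwise join everything

print(peek_or_join([]))         # None
print(peek_or_join([42]))       # 42
print(peek_or_join([1, 2, 3]))  # '123'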
Example 8
Project: word2vec-twitter   Author: loretoparisi   File: word2vecReaderUtils.py    License: MIT License
def chunkize_serial(iterable, chunksize, as_numpy=False):
    """
    Return elements from the iterable in `chunksize`-ed lists. The last returned
    element may be smaller (if length of collection is not divisible by `chunksize`).

    >>> print(list(grouper(range(10), 3)))
    [[0, 1, 2], [3, 4, 5], [6, 7, 8], [9]]

    """
    import numpy
    it = iter(iterable)
    while True:
        if as_numpy:
            # convert each document to a 2d numpy array (~6x faster when transmitting
            # chunk data over the wire, in Pyro)
            wrapped_chunk = [[numpy.array(doc) for doc in itertools.islice(it, int(chunksize))]]
        else:
            wrapped_chunk = [list(itertools.islice(it, int(chunksize)))]
        if not wrapped_chunk[0]:
            break
        # memory opt: wrap the chunk and then pop(), to avoid leaving behind a dangling reference
        yield wrapped_chunk.pop() 
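Without the numpy wrapping, chunkize_serial is the classic islice grouper: keep pulling chunksize items from one shared iterator until nothing comes back. A minimal pure-Python version of the same idea, matching the docstring above:

from itertools import islice

def grouper(iterable, chunksize):
    it = iter(iterable)
    while True:
        chunk = list(islice(it, chunksize))
        if not chunk:          # iterator exhausted
            return
        yield chunk

print(list(grouper(range(10), 3)))  # [[0, 1, 2], [3, 4, 5], [6, 7, 8], [9]]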
Example 9
Project: vergeml   Author: mme   File: test_views.py    License: MIT License
def test_iterview_infinite():
    loader = LiveLoader('.cache', SourceTest())
    iterview = IteratorView(loader, 'train', infinite=True)
    assert list(map(lambda tp: tp[0], itertools.islice(iterview, 150))) \
        == list(range(100)) + list(range(50)) 
Example 10
Project: vergeml   Author: mme   File: test_views.py    License: MIT License
def test_iterview_random():
    loader = LiveLoader('.cache', SourceTest())
    iterview = IteratorView(loader, 'train', randomize=True, fetch_size=1)
    assert list(map(lambda tp: tp[0], itertools.islice(iterview, 10))) \
        == [92, 1, 43, 61, 35, 73, 48, 18, 98, 36] 
Example 11
Project: vergeml   Author: mme   File: test_views.py    License: MIT License
def test_iterview_random_fetch_size():
    loader = LiveLoader('.cache', SourceTest())
    iterview = IteratorView(loader, 'train', randomize=True, fetch_size=10)
    assert list(map(lambda tp: tp[0], itertools.islice(iterview, 10))) \
        == list(range(70, 80)) 
Example 12
Project: vergeml   Author: mme   File: test_views.py    License: MIT License
def test_iterview_random2():
    loader = LiveLoader('.cache', SourceTest())
    iterview = IteratorView(loader, 'train', randomize=True, fetch_size=1)
    iterview2 = IteratorView(loader, 'train', randomize=True, random_seed=2601, fetch_size=1)
    assert list(map(lambda tp: tp[0], itertools.islice(iterview2, 10))) \
        != list(map(lambda tp: tp[0], itertools.islice(iterview, 10))) 
Example 13
Project: MPContribs   Author: materialsproject   File: matbench_upload.py    License: MIT License
def chunks(data, SIZE=500):
    it = iter(data)
    for i in range(0, len(data), SIZE):
        if isinstance(data, dict):
            yield {k: data[k] for k in islice(it, SIZE)}
        else:
            yield data[i : i + SIZE] 
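For the dict branch, islice is applied to the dict's key iterator, so each pass through the loop picks up the next SIZE keys and builds a sub-dict from them. A small illustration of that idea with a reduced chunk size:

from itertools import islice

def dict_chunks(data, size=2):
    it = iter(data)  # iterates over the keys
    for _ in range(0, len(data), size):
        yield {k: data[k] for k in islice(it, size)}

print(list(dict_chunks({'a': 1, 'b': 2, 'c': 3, 'd': 4, 'e': 5})))
# [{'a': 1, 'b': 2}, {'c': 3, 'd': 4}, {'e': 5}]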
Example 14
Project: InsightAgent   Author: insightfinder   File: getmetrics_cadvisor.py    License: Apache License 2.0
def chunk_map(data, SIZE=50):
    """Yield successive n-sized chunks from l."""
    it = iter(data)
    for i in xrange(0, len(data), SIZE):
        yield {k: data[k] for k in islice(it, SIZE)} 
Example 15
Project: InsightAgent   Author: insightfinder   File: getlogs_k8s.py    License: Apache License 2.0
def chunk_map(data, SIZE=50):
    """Yield successive n-sized chunks from l."""
    it = iter(data)
    for i in xrange(0, len(data), SIZE):
        yield {k: data[k] for k in islice(it, SIZE)} 
Example 16
Project: InsightAgent   Author: insightfinder   File: getlogs_evtx.py    License: Apache License 2.0
def chunk_map(data, SIZE=50):
    """Yield successive n-sized chunks from l."""
    it = iter(data)
    for i in xrange(0, len(data), SIZE):
        yield {k: data[k] for k in islice(it, SIZE)} 
Example 17
Project: InsightAgent   Author: insightfinder   File: getlogs_spark.py    License: Apache License 2.0
def chunk_map(data, SIZE=50):
    """Yield successive n-sized chunks from l."""
    it = iter(data)
    for i in xrange(0, len(data), SIZE):
        yield {k: data[k] for k in islice(it, SIZE)} 
Example 18
Project: InsightAgent   Author: insightfinder   File: getmetrics_sar.py    License: Apache License 2.0
def chunk_map(data, SIZE=50):
    """Yield successive n-sized chunks from l."""
    it = iter(data)
    for i in xrange(0, len(data), SIZE):
        yield {k: data[k] for k in islice(it, SIZE)} 
Example 19
Project: InsightAgent   Author: insightfinder   File: getmessages_prometheus.py    License: Apache License 2.0
def chunk_map(data, SIZE=50):
    """Yield successive n-sized chunks from l."""
    it = iter(data)
    for i in xrange(0, len(data), SIZE):
        yield {k: data[k] for k in islice(it, SIZE)} 
Example 20
Project: InsightAgent   Author: insightfinder   File: getlogs_hadoop-mapreduce.py    License: Apache License 2.0
def chunk_map(data, SIZE=50):
    """Yield successive n-sized chunks from l."""
    it = iter(data)
    for i in xrange(0, len(data), SIZE):
        yield {k: data[k] for k in islice(it, SIZE)} 
Example 21
Project: InsightAgent   Author: insightfinder   File: getmetrics_zipkin.py    License: Apache License 2.0
def chunk_map(data, SIZE=50):
    """Yield successive n-sized chunks from l."""
    it = iter(data)
    for i in xrange(0, len(data), SIZE):
        yield {k: data[k] for k in islice(it, SIZE)} 
Example 22
Project: InsightAgent   Author: insightfinder   File: getmetrics_datadog.py    License: Apache License 2.0
def chunk_map(data, SIZE=50):
    """Yield successive n-sized chunks from l."""
    it = iter(data)
    for i in xrange(0, len(data), SIZE):
        yield {k: data[k] for k in islice(it, SIZE)} 
Example 23
Project: eth-account   Author: ethereum   File: transactions.py    License: MIT License
def strip_signature(txn):
    unsigned_parts = itertools.islice(txn, len(UNSIGNED_TRANSACTION_FIELDS))
    return list(unsigned_parts) 
Example 24
Project: L3C-PyTorch   Author: fab-jul   File: auto_crop.py    License: GNU General Public License v3.0
def stitch(parts):
    side = int(math.sqrt(len(parts)))
    if side * side != len(parts):
        raise ValueError(f'Invalid number of parts {len(parts)}')

    rows = []

    # Sort by original position in image
    crops_idx_mapping = _get_crop_idx_mapping(side)
    parts_sorted = (
        part for _, part in sorted(
        enumerate(parts), key=lambda ip: crops_idx_mapping[ip[0]]))

    parts_itr = iter(parts_sorted)  # Turn into iterator so we can easily grab elements
    for _ in range(side):
        parts_row = itertools.islice(parts_itr, side)  # Get `side` number of parts
        row = torch.cat(list(parts_row), dim=3)  # cat on W dimension
        rows.append(row)

    assert next(parts_itr, None) is None, f'Iterator should be empty, got {len(rows)} rows'
    img = torch.cat(rows, dim=2)  # cat on H dimension

    # Validate.
    B, C, H_part, W_part = parts[0].shape
    expected_shape = (B, C, H_part * side, W_part * side)
    assert img.shape == expected_shape, f'{img.shape} != {expected_shape}'

    return img 
Example 25
Project: scanorama   Author: brianhie   File: scanorama.py    License: MIT License
def plot_clusters(coords, clusters, s=1, colors=None):
    if coords.shape[0] != clusters.shape[0]:
        sys.stderr.write(
            'Error: mismatch, {} cells, {} labels\n'
            .format(coords.shape[0], clusters.shape[0])
        )
        exit(1)

    if colors is None:
        colors = np.array(
            list(islice(cycle([
                '#377eb8', '#ff7f00', '#4daf4a',
                '#f781bf', '#a65628', '#984ea3',
                '#999999', '#e41a1c', '#dede00',
                '#ffe119', '#e6194b', '#ffbea3',
                '#911eb4', '#46f0f0', '#f032e6',
                '#d2f53c', '#008080', '#e6beff',
                '#aa6e28', '#800000', '#aaffc3',
                '#808000', '#ffd8b1', '#000080',
                '#808080', '#fabebe', '#a3f4ff'
            ]), int(max(clusters) + 1)))
        )

    plt.figure()
    plt.scatter(coords[:, 0], coords[:, 1],
                c=colors[clusters], s=s)

# Put datasets into a single matrix with the intersection of all genes. 
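The islice(cycle(...), n) idiom above repeats a fixed palette as often as needed, so every cluster index maps to a color no matter how many clusters there are. The pattern in isolation, with a shortened palette for illustration:

from itertools import cycle, islice

palette = ['#377eb8', '#ff7f00', '#4daf4a']
n_clusters = 7

colors = list(islice(cycle(palette), n_clusters))
print(colors)
# ['#377eb8', '#ff7f00', '#4daf4a', '#377eb8', '#ff7f00', '#4daf4a', '#377eb8']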
Example 26
Project: pointnet-registration-framework   Author: vinits5   File: plyfile.py    License: MIT License
def _read_txt(self, stream):
        '''
        Load a PLY element from an ASCII-format PLY file.  The element
        may contain list properties.

        '''
        self._data = _np.empty(self.count, dtype=self.dtype())

        k = 0
        for line in _islice(iter(stream.readline, b''), self.count):
            fields = iter(line.strip().split())
            for prop in self.properties:
                try:
                    self._data[prop.name][k] = prop._from_fields(fields)
                except StopIteration:
                    raise PlyParseError("early end-of-line",
                                        self, k, prop)
                except ValueError:
                    raise PlyParseError("malformed input",
                                        self, k, prop)
            try:
                next(fields)
            except StopIteration:
                pass
            else:
                raise PlyParseError("expected end-of-line", self, k)
            k += 1

        if k < self.count:
            del self._data
            raise PlyParseError("early end-of-file", self, k) 
Example 27
Project: pointnet-registration-framework   Author: vinits5   File: plyfile.py    License: MIT License
def _from_fields(self, fields):
        (len_t, val_t) = self.list_dtype()

        n = int(_np.dtype(len_t).type(next(fields)))

        data = _np.loadtxt(list(_islice(fields, n)), val_t, ndmin=1)
        if len(data) < n:
            raise StopIteration

        return data 
Example 28
Project: gnocchi   Author: gnocchixyz   File: utils.py    License: Apache License 2.0
def grouper(iterable, n):
    it = iter(iterable)
    while True:
        chunk = tuple(itertools.islice(it, n))
        if not chunk:
            return
        yield chunk 
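Usage note: islice simply returns nothing once the shared iterator is exhausted, so the empty tuple is the natural stop condition. Assuming the grouper() above (and import itertools) are in scope:

for chunk in grouper(range(7), 3):
    print(chunk)
# (0, 1, 2)
# (3, 4, 5)
# (6,)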
Example 29
Project: jawfish   Author: war-and-code   File: reprlib.py    License: MIT License
def _repr_iterable(self, x, level, left, right, maxiter, trail=''):
        n = len(x)
        if level <= 0 and n:
            s = '...'
        else:
            newlevel = level - 1
            repr1 = self.repr1
            pieces = [repr1(elem, newlevel) for elem in islice(x, maxiter)]
            if n > maxiter:  pieces.append('...')
            s = ', '.join(pieces)
            if n == 1 and trail:  right = trail + right
        return '%s%s%s' % (left, s, right) 
Example 30
Project: jawfish   Author: war-and-code   File: reprlib.py    License: MIT License
def repr_dict(self, x, level):
        n = len(x)
        if n == 0: return '{}'
        if level <= 0: return '{...}'
        newlevel = level - 1
        repr1 = self.repr1
        pieces = []
        for key in islice(_possibly_sorted(x), self.maxdict):
            keyrepr = repr1(key, newlevel)
            valrepr = repr1(x[key], newlevel)
            pieces.append('%s: %s' % (keyrepr, valrepr))
        if n > self.maxdict: pieces.append('...')
        s = ', '.join(pieces)
        return '{%s}' % (s,)
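In both reprlib methods, islice guarantees that at most maxiter or maxdict elements are ever formatted, so the summary string stays bounded for arbitrarily large containers. These snippets mirror the standard library's reprlib module, which can be exercised directly:

import reprlib

r = reprlib.Repr()
r.maxdict = 3  # format at most three key/value pairs

big = {i: i * i for i in range(100)}
print(r.repr(big))  # {0: 0, 1: 1, 2: 4, ...}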