Python itertools.chain() Examples
The following are 30 code examples of itertools.chain(), collected from open-source projects. The originating project, source file, and license are noted above each example. You may also want to check out all available functions and classes of the itertools module.
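As a quick, illustrative refresher (not taken from any of the projects below): chain() concatenates several iterables lazily, while chain.from_iterable() does the same for a single iterable of iterables.

from itertools import chain

# chain() takes the iterables as separate arguments...
list(chain([1, 2], (3, 4), "ab"))        # [1, 2, 3, 4, 'a', 'b']

# ...while chain.from_iterable() takes one iterable of iterables
# and is lazy in the outer iterable as well.
nested = [[1, 2], [3, 4]]
list(chain.from_iterable(nested))        # [1, 2, 3, 4]

# A common idiom in the examples below: flatten a list of lists.
list(chain(*nested))                     # [1, 2, 3, 4]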
Example #1
Source File: argparse_to_json.py From me-ica with GNU Lesser General Public License v2.1 | 6 votes |
def process(parser, widget_dict):
    mutually_exclusive_groups = [
        [mutex_action for mutex_action in group_actions._group_actions]
        for group_actions in parser._mutually_exclusive_groups]

    group_options = list(chain(*mutually_exclusive_groups))

    base_actions = [action for action in parser._actions
                    if action not in group_options
                    and action.dest != 'help']

    required_actions = filter(is_required, base_actions)
    optional_actions = filter(is_optional, base_actions)

    return list(categorize(required_actions, widget_dict, required=True)) + \
           list(categorize(optional_actions, widget_dict)) + \
           map(build_radio_group, mutually_exclusive_groups)
Example #2
Source File: single_path_functions.py From apted with MIT License | 6 votes |
def sub_spf1(ni, subtree1, subtree2, op, calculate):
    """Implements spf1 single path function for the case when the
    other subtree is a single node

    Params:
      ni -- node indexer for the subtree that has more than one element
      subtree1 -- subtree that has a single element
      subtree2 -- subtree that has more than one element
      op -- cost of deleting/inserting node
      calculate -- function(node, other) that returns the cost of
        renaming nodes
    """
    # pylint: disable=invalid-name
    # pylint: disable=too-many-arguments
    cost = subtree2.sum_cost
    max_cost = cost + op
    min_ren_minus_op = min(chain([cost], [
        calculate(subtree1, info)
        for _, info in ni.preorder_ltr(subtree2)
    ]))
    return min(min_ren_minus_op + cost, max_cost)
Example #3
Source File: hmm.py From razzy-spinner with GNU General Public License v3.0 | 6 votes |
def point_entropy(self, unlabeled_sequence):
    """
    Returns the pointwise entropy over the possible states at each
    position in the chain, given the observation sequence.
    """
    unlabeled_sequence = self._transform(unlabeled_sequence)

    T = len(unlabeled_sequence)
    N = len(self._states)

    alpha = self._forward_probability(unlabeled_sequence)
    beta = self._backward_probability(unlabeled_sequence)
    normalisation = logsumexp2(alpha[T-1])

    entropies = np.zeros(T, np.float64)
    probs = np.zeros(N, np.float64)
    for t in range(T):
        for s in range(N):
            probs[s] = alpha[t, s] + beta[t, s] - normalisation

        for s in range(N):
            entropies[t] -= 2**(probs[s]) * probs[s]

    return entropies
Example #4
Source File: block_base.py From DOTA_models with Apache License 2.0 | 6 votes |
def CreateWeightLoss(self):
    """Returns L2 loss list of (almost) all variables used inside this block.

    When this method needs to be overridden, there are two choices.

    1. Override CreateWeightLoss() to change the weight loss of all
       variables that belong to this block, both directly and indirectly.
    2. Override _CreateWeightLoss() to change the weight loss of all
       variables that directly belong to this block but not to the
       sub-blocks.

    Returns:
      A Tensor object or None.
    """
    losses = list(itertools.chain(
        itertools.chain.from_iterable(
            t.CreateWeightLoss() for t in self._subblocks),
        self._CreateWeightLoss()))
    return losses
Example #5
Source File: app.py From quart with MIT License | 6 votes |
async def do_teardown_request(
    self,
    exc: Optional[BaseException],
    request_context: Optional[RequestContext] = None,
) -> None:
    """Teardown the request, calling the teardown functions.

    Arguments:
        exc: Any exception not handled that has caused the request
            to teardown.
        request_context: The request context, optional as Flask
            omits this argument.
    """
    request_ = (request_context or _request_ctx_stack.top).request
    functions = self.teardown_request_funcs[None]
    blueprint = request_.blueprint
    if blueprint is not None:
        functions = chain(functions, self.teardown_request_funcs[blueprint])  # type: ignore

    for function in functions:
        await function(exc)
    await request_tearing_down.send(self, exc=exc)
Example #6
Source File: app.py From quart with MIT License | 6 votes |
async def do_teardown_websocket(
    self,
    exc: Optional[BaseException],
    websocket_context: Optional[WebsocketContext] = None,
) -> None:
    """Teardown the websocket, calling the teardown functions.

    Arguments:
        exc: Any exception not handled that has caused the websocket
            to teardown.
        websocket_context: The websocket context, optional as Flask
            omits this argument.
    """
    websocket_ = (websocket_context or _websocket_ctx_stack.top).websocket
    functions = self.teardown_websocket_funcs[None]
    blueprint = websocket_.blueprint
    if blueprint is not None:
        functions = chain(functions, self.teardown_websocket_funcs[blueprint])  # type: ignore

    for function in functions:
        await function(exc)
    await websocket_tearing_down.send(self, exc=exc)
Example #7
Source File: app.py From quart with MIT License | 6 votes |
async def preprocess_request(
    self, request_context: Optional[RequestContext] = None
) -> Optional[ResponseReturnValue]:
    """Preprocess the request i.e. call before_request functions.

    Arguments:
        request_context: The request context, optional as Flask
            omits this argument.
    """
    request_ = (request_context or _request_ctx_stack.top).request
    blueprint = request_.blueprint
    processors = self.url_value_preprocessors[None]
    if blueprint is not None:
        processors = chain(processors, self.url_value_preprocessors[blueprint])  # type: ignore
    for processor in processors:
        processor(request.endpoint, request.view_args)

    functions = self.before_request_funcs[None]
    if blueprint is not None:
        functions = chain(functions, self.before_request_funcs[blueprint])  # type: ignore
    for function in functions:
        result = await function()
        if result is not None:
            return result
    return None
Example #8
Source File: didyoumean_internal.py From DidYouMean-Python with MIT License | 6 votes |
def get_attribute_suggestions(type_str, attribute, frame):
    """Get the suggestions closest to the attribute name for a given type."""
    types = get_types_for_str(type_str, frame)
    attributes = set(a for t in types for a in dir(t))
    if type_str == 'module':
        # For module, we manage to get the corresponding 'module' type
        # but the type doesn't bring much information about its content.
        # A hacky way to do so is to assume that the exception was something
        # like 'module_name.attribute' so that we can actually find the module
        # based on the name. Eventually, we check that the found object is a
        # module indeed. This is not failproof but it brings a whole lot of
        # interesting suggestions and the (minimal) risk is to have invalid
        # suggestions.
        module_name = frame.f_code.co_names[0]
        objs = get_objects_in_frame(frame)
        mod = objs[module_name][0].obj
        if inspect.ismodule(mod):
            attributes = set(dir(mod))

    return itertools.chain(
        suggest_attribute_as_builtin(attribute, type_str, frame),
        suggest_attribute_alternative(attribute, type_str, attributes),
        suggest_attribute_as_typo(attribute, attributes),
        suggest_attribute_as_special_case(attribute))
Example #9
Source File: app.py From quart with MIT License | 6 votes |
async def preprocess_websocket(
    self, websocket_context: Optional[WebsocketContext] = None
) -> Optional[ResponseReturnValue]:
    """Preprocess the websocket i.e. call before_websocket functions.

    Arguments:
        websocket_context: The websocket context, optional as Flask
            omits this argument.
    """
    websocket_ = (websocket_context or _websocket_ctx_stack.top).websocket
    blueprint = websocket_.blueprint
    processors = self.url_value_preprocessors[None]
    if blueprint is not None:
        processors = chain(processors, self.url_value_preprocessors[blueprint])  # type: ignore
    for processor in processors:
        processor(websocket_.endpoint, websocket_.view_args)

    functions = self.before_websocket_funcs[None]
    if blueprint is not None:
        functions = chain(functions, self.before_websocket_funcs[blueprint])  # type: ignore
    for function in functions:
        result = await function()
        if result is not None:
            return result
    return None
Example #10
Source File: berny.py From pyberny with Mozilla Public License 2.0 | 6 votes |
def __init__(
    self, geom, debug=False, restart=None, maxsteps=100, logger=None, **params
):
    self._debug = debug
    self._maxsteps = maxsteps
    self._converged = False
    self._n = 0
    self._log = BernyAdapter(logger or log, {'step': self._n})
    s = self._state = Berny.State()
    if restart:
        vars(s).update(restart)
        return
    s.geom = geom
    s.params = dict(chain(defaults.items(), params.items()))
    s.trust = s.params['trust']
    s.coords = InternalCoords(
        s.geom, dihedral=s.params['dihedral'], superweakdih=s.params['superweakdih']
    )
    s.H = s.coords.hessian_guess(s.geom)
    s.weights = s.coords.weights(s.geom)
    s.future = Berny.Point(s.coords.eval_geom(s.geom), None, None)
    s.first = True
    for line in str(s.coords).split('\n'):
        self._log.info(line)
Example #11
Source File: app.py From quart with MIT License | 6 votes |
async def update_template_context(self, context: dict) -> None:
    """Update the provided template context.

    This adds additional context from the various template context
    processors.

    Arguments:
        context: The context to update (mutate).
    """
    processors = self.template_context_processors[None]
    if has_request_context():
        blueprint = _request_ctx_stack.top.request.blueprint
        if blueprint is not None and blueprint in self.template_context_processors:
            processors = chain(  # type: ignore
                processors, self.template_context_processors[blueprint]
            )
    extra_context: dict = {}
    for processor in processors:
        extra_context.update(await processor())
    original = context.copy()
    context.update(extra_context)
    context.update(original)
Example #12
Source File: EncodingDataParallel.py From torch-toolbox with BSD 3-Clause "New" or "Revised" License | 6 votes |
def forward(self, *inputs, **kwargs):
    if not self.device_ids:
        return self.module(*inputs, **kwargs)

    for t in chain(self.module.parameters(), self.module.buffers()):
        if t.device != self.src_device_obj:
            raise RuntimeError(
                "module must have its parameters and buffers "
                "on device {} (device_ids[0]) but found one of "
                "them on device: {}".format(
                    self.src_device_obj, t.device))

    inputs, kwargs = self.scatter(inputs, kwargs, self.device_ids)
    if len(self.device_ids) == 1:
        return self.module(*inputs, **kwargs)
    replicas = self.replicate(self.module, self.device_ids[:len(inputs)])
    outputs = self.parallel_apply(replicas, inputs, kwargs)
    return outputs
Example #13
Source File: DDPAE.py From DDPAE-video-prediction with MIT License | 6 votes |
def setup_training(self):
    '''
    Setup Pyro SVI, optimizers.
    '''
    if not self.is_train:
        return

    self.pyro_optimizer = optim.Adam({'lr': self.lr_init})
    self.svis = {'elbo': SVI(self.model, self.guide, self.pyro_optimizer,
                             loss=Trace_ELBO())}

    # Separate pose_model parameters and other networks' parameters
    params = []
    for name, net in self.nets.items():
        if name != 'pose_model':
            params.append(net.parameters())
    self.optimizer = torch.optim.Adam(
        [{'params': self.pose_model.parameters(), 'lr': self.lr_init},
         {'params': itertools.chain(*params), 'lr': self.lr_init}],
        betas=(0.5, 0.999))
Example #14
Source File: manager.py From zun with Apache License 2.0 | 6 votes |
def _wait_for_volumes_deleted(self, context, volmaps, container,
                              timeout=60, poll_interval=1):
    start_time = time.time()
    try:
        volmaps = itertools.chain(volmaps)
        volmap = next(volmaps)
        while time.time() - start_time < timeout:
            if not volmap.auto_remove:
                volmap = next(volmaps)
            driver = self._get_driver(container)
            is_deleted, is_error = driver.is_volume_deleted(
                context, volmap)
            if is_deleted:
                volmap = next(volmaps)
            if is_error:
                break
            time.sleep(poll_interval)
    except StopIteration:
        return

    msg = _("Volumes cannot be successfully deleted after "
            "%d seconds") % (timeout)
    self._fail_container(context, container, msg, unset_host=True)
    raise exception.Conflict(msg)
Example #15
Source File: didyoumean_internal.py From DidYouMean-Python with MIT License | 5 votes |
def get_suggestions_for_exception(value, traceback):
    """Get suggestions for an exception."""
    frame = get_last_frame(traceback)
    return itertools.chain.from_iterable(
        func(value, frame)
        for error_type, functions in SUGGESTION_FUNCTIONS.items()
        if isinstance(value, error_type)
        for func in functions)
Example #16
Source File: chomsky.py From razzy-spinner with GNU General Public License v3.0 | 5 votes |
def generate_chomsky(times=5, line_length=72):
    parts = []
    for part in (leadins, subjects, verbs, objects):
        phraselist = list(map(str.strip, part.splitlines()))
        random.shuffle(phraselist)
        parts.append(phraselist)
    output = chain(*islice(izip(*parts), 0, times))
    print(textwrap.fill(" ".join(output), line_length))
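Note that this snippet is Python 2: izip comes from itertools.izip, which no longer exists in Python 3. A minimal Python 3 sketch of the same chain-over-islice pattern, using the built-in zip and hypothetical miniature phrase tables in place of the project's leadins/subjects/verbs/objects, might look like:

from itertools import chain, islice
import random
import textwrap

# Hypothetical stand-ins for the original phrase tables (one phrase per line).
leadins = "It appears that\nI suggest that"
subjects = "the theory\nthe notion"
verbs = "entails\nsuggests"
objects = "a corpus of utterance tokens.\na descriptive fact."

def generate_chomsky_py3(times=2, line_length=72):
    parts = []
    for part in (leadins, subjects, verbs, objects):
        phraselist = list(map(str.strip, part.splitlines()))
        random.shuffle(phraselist)
        parts.append(phraselist)
    # zip(*parts) yields (leadin, subject, verb, object) tuples; islice keeps
    # the first `times` of them (the tiny tables above cap this at 2), and
    # chain flattens the tuples into a single stream of phrases.
    output = chain(*islice(zip(*parts), 0, times))
    print(textwrap.fill(" ".join(output), line_length))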
Example #17
Source File: model.py From me-ica with GNU Lesser General Public License v2.1 | 5 votes |
def build_command_line_string(self):
    optional_args = [arg.value for arg in self.optional_args]
    required_args = [c.value for c in self.required_args if c.commands]
    position_args = [c.value for c in self.required_args if not c.commands]
    if position_args:
        position_args.insert(0, "--")
    cmd_string = ' '.join(filter(None, chain(required_args, optional_args,
                                             position_args)))

    if self.layout_type == 'column':
        cmd_string = u'{} {}'.format(
            self.argument_groups[self.active_group].command, cmd_string)

    return u'{} --ignore-gooey {}'.format(self.build_spec['target'], cmd_string)
Example #18
Source File: collections.py From chainerrl with MIT License | 5 votes |
def __iter__(self):
    return itertools.chain(reversed(self._queue_front),
                           iter(self._queue_back))
Example #19
Source File: model.py From me-ica with GNU Lesser General Public License v2.1 | 5 votes |
def group_arguments(self, widget_list):
    is_required = lambda widget: widget['required']
    not_checkbox = lambda widget: widget['type'] != 'CheckBox'

    required_args, optional_args = self.partition(widget_list, is_required)
    if self.build_spec['group_by_type']:
        optional_args = chain(*self.partition(optional_args, not_checkbox))
    return map(self.to_object, required_args), map(self.to_object, optional_args)
Example #20
Source File: load_w2v.py From tartarus with MIT License | 5 votes |
def build_vocab(sentences):
    """
    Builds a vocabulary mapping from word to index based on the sentences.
    Returns vocabulary mapping and inverse vocabulary mapping.
    """
    # Build vocabulary
    word_counts = Counter(itertools.chain(*sentences))
    # Mapping from index to word
    vocabulary_inv = [x[0] for x in word_counts.most_common()]
    # Mapping from word to index
    vocabulary = {x: i for i, x in enumerate(vocabulary_inv)}
    return [vocabulary, vocabulary_inv]
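A small usage sketch (made-up input; assumes Counter is imported from collections, as in the source module). chain(*sentences) flattens the tokenized sentences into a single token stream before counting:

# Illustrative only: each inner list is one tokenized sentence.
sentences = [["the", "cat"], ["the", "dog"]]
vocabulary, vocabulary_inv = build_vocab(sentences)
# vocabulary_inv -> ['the', 'cat', 'dog']   (most frequent first; ties keep
#                                            first-seen order on CPython 3.7+)
# vocabulary     -> {'the': 0, 'cat': 1, 'dog': 2}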
Example #21
Source File: configparser.py From jawfish with MIT License | 5 votes |
def __iter__(self):
    # XXX does it break when underlying container state changed?
    return itertools.chain((self.default_section,), self._sections.keys())
Example #22
Source File: types.py From pyspark-cassandra with Apache License 2.0 | 5 votes |
def __hash__(self):
    h = 1
    for v in chain(self.keys(), self.values()):
        h = 31 * h + hash(v)
    return h
Example #23
Source File: tests.py From pyspark-cassandra with Apache License 2.0 | 5 votes |
def test(self):
    rows = list(chain(*self.rows))
    rows_by_key = {row['key']: row for row in rows}

    self.sc \
        .parallelize(rows) \
        .saveToCassandra(self.keyspace, self.table)

    self.stream \
        .joinWithCassandraTable(self.keyspace, self.table,
                                ['text'], ['key']) \
        .foreachRDD(self.checkRDD)

    self.ssc.start()
    self.ssc.awaitTermination((self.count + 1) * self.interval)
    self.ssc.stop(stopSparkContext=False, stopGraceFully=True)

    joined_rows = self.joined_rows.value
    self.assertEqual(len(joined_rows), len(rows))
    for row in joined_rows:
        self.assertEqual(len(row), 2)
        left, right = row
        self.assertEqual(type(left), type(right))
        self.assertEqual(rows_by_key[left['key']], left)
        self.assertEqual(left['text'], right['text'])
        self.assertEqual(len(right), 1)
Example #24
Source File: rouge.py From TransferRL with MIT License | 5 votes |
def _split_into_words(sentences):
    """Splits multiple sentences into words and flattens the result"""
    return list(itertools.chain(*[_.split(" ") for _ in sentences]))
Example #25
Source File: ssa.py From Twitch-Chat-Downloader with MIT License | 5 votes |
def generator(self) -> Generator[Tuple[str, Optional[Comment]], None, None]:
    """
    Line generator
    :return:
    """
    for line in chain(self.prefix(), self.dialogues(self.video.comments)):
        yield line
Example #26
Source File: heapq.py From jawfish with MIT License | 5 votes |
def nlargest(n, iterable, key=None):
    """Find the n largest elements in a dataset.

    Equivalent to:  sorted(iterable, key=key, reverse=True)[:n]
    """

    # Short-cut for n==1 is to use max() when len(iterable)>0
    if n == 1:
        it = iter(iterable)
        head = list(islice(it, 1))
        if not head:
            return []
        if key is None:
            return [max(chain(head, it))]
        return [max(chain(head, it), key=key)]

    # When n>=size, it's faster to use sorted()
    try:
        size = len(iterable)
    except (TypeError, AttributeError):
        pass
    else:
        if n >= size:
            return sorted(iterable, key=key, reverse=True)[:n]

    # When key is none, use simpler decoration
    if key is None:
        it = zip(iterable, count(0, -1))            # decorate
        result = _nlargest(n, it)
        return [r[0] for r in result]               # undecorate

    # General case, slowest method
    in1, in2 = tee(iterable)
    it = zip(map(key, in1), count(0, -1), in2)      # decorate
    result = _nlargest(n, it)
    return [r[2] for r in result]                   # undecorate
Example #27
Source File: heapq.py From jawfish with MIT License | 5 votes |
def nsmallest(n, iterable, key=None):
    """Find the n smallest elements in a dataset.

    Equivalent to:  sorted(iterable, key=key)[:n]
    """

    # Short-cut for n==1 is to use min() when len(iterable)>0
    if n == 1:
        it = iter(iterable)
        head = list(islice(it, 1))
        if not head:
            return []
        if key is None:
            return [min(chain(head, it))]
        return [min(chain(head, it), key=key)]

    # When n>=size, it's faster to use sorted()
    try:
        size = len(iterable)
    except (TypeError, AttributeError):
        pass
    else:
        if n >= size:
            return sorted(iterable, key=key)[:n]

    # When key is none, use simpler decoration
    if key is None:
        it = zip(iterable, count())                 # decorate
        result = _nsmallest(n, it)
        return [r[0] for r in result]               # undecorate

    # General case, slowest method
    in1, in2 = tee(iterable)
    it = zip(map(key, in1), count(), in2)           # decorate
    result = _nsmallest(n, it)
    return [r[2] for r in result]                   # undecorate
Example #28
Source File: heapq.py From jawfish with MIT License | 5 votes |
def merge(*iterables):
    '''Merge multiple sorted inputs into a single sorted output.

    Similar to sorted(itertools.chain(*iterables)) but returns a generator,
    does not pull the data into memory all at once, and assumes that each of
    the input streams is already sorted (smallest to largest).

    >>> list(merge([1,3,5,7], [0,2,4,8], [5,10,15,20], [], [25]))
    [0, 1, 2, 3, 4, 5, 5, 7, 8, 10, 15, 20, 25]

    '''
    _heappop, _heapreplace, _StopIteration = heappop, heapreplace, StopIteration
    _len = len

    h = []
    h_append = h.append
    for itnum, it in enumerate(map(iter, iterables)):
        try:
            next = it.__next__
            h_append([next(), itnum, next])
        except _StopIteration:
            pass
    heapify(h)

    while _len(h) > 1:
        try:
            while True:
                v, itnum, next = s = h[0]
                yield v
                s[0] = next()           # raises StopIteration when exhausted
                _heapreplace(h, s)      # restore heap condition
        except _StopIteration:
            _heappop(h)                 # remove empty iterator
    if h:
        # fast case when only a single iterator remains
        v, itnum, next = h[0]
        yield v
        yield from next.__self__

# Extend the implementations of nsmallest and nlargest to use a key= argument
Example #29
Source File: configparser.py From jawfish with MIT License | 5 votes |
def _join_multiline_values(self):
    defaults = self.default_section, self._defaults
    all_sections = itertools.chain((defaults,),
                                   self._sections.items())
    for section, options in all_sections:
        for name, val in options.items():
            if isinstance(val, list):
                val = '\n'.join(val).rstrip()
            options[name] = self._interpolation.before_read(self,
                                                            section,
                                                            name, val)
Example #30
Source File: test_instruction_set.py From pyshgp with MIT License | 5 votes |
def all_core_instrucitons(core_type_lib):
    return set(chain(
        common.instructions(core_type_lib),
        io.instructions(core_type_lib),
        code.instructions(),
        numeric.instructions(),
        text.instructions(),
        logical.instructions(),
    ))