Python itertools.combinations() Examples

The following are 30 code examples showing how to use itertools.combinations(). These examples are extracted from open source projects. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example.

You may check out the related API usage on the sidebar.

You may also want to check out all available functions/classes of the module itertools, or try the search function.

Example 1
Project: fuku-ml   Author: fukuball   File: RidgeRegression.py    License: MIT License 6 votes vote down vote up
def init_W(self, mode='normal'):
        """Initialize one zero weight vector per unordered class pair.

        Builds ``self.W`` as a dict mapping each 2-combination of the
        unique training labels to a zero vector of the feature dimension.
        Requires training data to have been loaded first; otherwise an
        empty dict is returned after printing a reminder.
        """
        self.W = {}

        # Guard: weights can only be initialized once training data exists.
        if self.status not in ('load_train_data', 'train'):
            print("Please load train data first.")
            return self.W

        self.status = 'init'

        self.data_num = len(self.train_Y)
        self.data_demension = len(self.train_X[0])
        self.class_list = list(itertools.combinations(np.unique(self.train_Y), 2))

        self.W = {pair: np.zeros(self.data_demension) for pair in self.class_list}

        return self.W
Example 2
Project: fuku-ml   Author: fukuball   File: KernelRidgeRegression.py    License: MIT License 6 votes vote down vote up
def init_W(self, mode='normal'):
        """Create a zero weight vector for every unordered pair of classes.

        Returns ``self.W``; prints a reminder and returns an empty dict if
        training data has not been loaded yet.
        """
        self.W = {}

        loaded = self.status in ('load_train_data', 'train')
        if not loaded:
            print("Please load train data first.")
            return self.W

        self.status = 'init'

        self.data_num = len(self.train_Y)
        self.data_demension = len(self.train_X[0])
        unique_labels = np.unique(self.train_Y)
        self.class_list = list(itertools.combinations(unique_labels, 2))

        # One zeroed weight vector per class pair.
        for pair in self.class_list:
            self.W[pair] = np.zeros(self.data_demension)

        return self.W
Example 3
Project: fuku-ml   Author: fukuball   File: SupportVectorMachine.py    License: MIT License 6 votes vote down vote up
def init_W(self, mode='normal'):
        """Initialize the weight table: one zero vector per class pair."""
        self.W = {}

        # Weights can only be set up after training data is available.
        if self.status not in ('load_train_data', 'train'):
            print("Please load train data first.")
            return self.W

        self.status = 'init'

        self.data_num = len(self.train_Y)
        self.data_demension = len(self.train_X[0])
        self.class_list = list(
            itertools.combinations(np.unique(self.train_Y), 2))
        self.W = dict(
            (pair, np.zeros(self.data_demension)) for pair in self.class_list)

        return self.W
Example 4
Project: fuku-ml   Author: fukuball   File: LinearRegression.py    License: MIT License 6 votes vote down vote up
def init_W(self, mode='normal'):
        """Build zeroed one-vs-one weight vectors for the loaded data."""
        self.W = {}

        ready = self.status == 'load_train_data' or self.status == 'train'
        if not ready:
            print("Please load train data first.")
            return self.W

        self.status = 'init'

        self.data_num = len(self.train_Y)
        self.data_demension = len(self.train_X[0])
        self.class_list = list(itertools.combinations(np.unique(self.train_Y), 2))

        dimension = self.data_demension
        self.W = {class_pair: np.zeros(dimension) for class_pair in self.class_list}

        return self.W
Example 5
Project: OpenFermion-Cirq   Author: quantumlib   File: swap_network_trotter.py    License: Apache License 2.0 6 votes vote down vote up
def params(self) -> Iterable[sympy.Symbol]:
        """The parameters of the ansatz."""
        n_qubits = len(self.qubits)
        for i in range(self.iterations):
            # Single-qubit 'U' parameters: emitted when the diagonal
            # one-body term is nonzero, or when include_all_z forces them.
            for p in range(n_qubits):
                if self.include_all_z or not numpy.isclose(
                        self.hamiltonian.one_body[p, p], 0):
                    yield LetterWithSubscripts('U', p, i)
            # Pairwise parameters: 'T' tracks the real part of the one-body
            # term, 'W' its imaginary part, 'V' the two-body term.
            for p, q in itertools.combinations(range(n_qubits), 2):
                one_body_pq = self.hamiltonian.one_body[p, q]
                if self.include_all_xxyy or not numpy.isclose(one_body_pq.real, 0):
                    yield LetterWithSubscripts('T', p, q, i)
                if self.include_all_yxxy or not numpy.isclose(one_body_pq.imag, 0):
                    yield LetterWithSubscripts('W', p, q, i)
                if self.include_all_cz or not numpy.isclose(
                        self.hamiltonian.two_body[p, q], 0):
                    yield LetterWithSubscripts('V', p, q, i)
Example 6
Project: razzy-spinner   Author: rafasashi   File: collocations.py    License: GNU General Public License v3.0 6 votes vote down vote up
def from_words(cls, words, window_size=3):
        """Construct a TrigramCollocationFinder for all trigrams in the given
        sequence.

        Counts are gathered over every window of ``window_size`` tokens:
        head-word frequency (weighted by the number of pairs drawn from the
        window tail), bigrams (head, second), wildcard pairs (head, third),
        and full trigrams.
        """
        if window_size < 3:
            raise ValueError("Specify window_size at least 3")

        word_fd = FreqDist()
        wild_fd = FreqDist()
        bigram_fd = FreqDist()
        trigram_fd = FreqDist()
        for window in ngrams(words, window_size, pad_right=True):
            head = window[0]
            if head is None:
                # Padding reached: no head word to anchor the counts.
                continue
            for second, third in _itertools.combinations(window[1:], 2):
                # Note: the head count is incremented once per pair drawn
                # from the tail, mirroring the pair-weighted totals below.
                word_fd[head] += 1
                if second is None:
                    continue
                bigram_fd[(head, second)] += 1
                if third is None:
                    continue
                wild_fd[(head, third)] += 1
                trigram_fd[(head, second, third)] += 1
        return cls(word_fd, bigram_fd, wild_fd, trigram_fd)
Example 7
Project: Localization   Author: kamalshadi   File: find_centroid.py    License: MIT License 6 votes vote down vote up
def canInd(P, ni):
    """Return candidate index subsets of P of size ni.

    For ni >= 2, a subset qualifies only if every pair of its members
    intersects according to intersection_matrix(P).

    :param P: sequence of regions/polygons
    :param ni: desired subset size
    :return: list of lists of indices into P
    """
    l = len(P)
    # Materialize the indices: under Python 3, `range` is lazy, and the
    # original `return [ind]` handed callers a range object instead of a
    # list of indices, breaking the list-of-lists contract of the other
    # return paths.
    ind = list(range(l))
    if ni < 2:
        return [[xx] for xx in ind]
    if ni >= l:
        return [ind]
    im = intersection_matrix(P)
    can = []
    for w in combinations(ind, ni):
        # Keep w only when every pair of its members intersects
        # (the diagonal im[i, i] is included, as in the original scan).
        if all(im[i, j] != 0 for i in w for j in w):
            can.append(list(w))
    return can
Example 8
Project: ConvLab   Author: ConvLab   File: Features.py    License: MIT License 6 votes vote down vote up
def get_ngrams(sentence, max_length, skip_ngrams=False, add_tags=True):
    """Return n-grams of length up to max_length found in sentence.

    Each result is a (text, gaps) pair. Contiguous n-grams carry an empty
    gaps list; skip n-grams carry the distances between the chosen word
    positions. With add_tags, the sentence is wrapped in <s> ... </s>.
    """
    words = sentence.split()
    if add_tags:
        words = ["<s>"] + words + ["</s>"]

    out = []
    if not skip_ngrams:
        # Contiguous n-grams: every window of size 1..max_length that fits.
        for start in range(len(words)):
            top = min(max_length + 1, len(words) - start + 1)
            for size in range(1, top):
                out.append((" ".join(words[start:start + size]), []))
    else:
        # Skip n-grams: every subset of word positions of each size, paired
        # with the gaps between consecutive chosen positions.
        for size in range(1, max_length + 1):
            for subset in set(itertools.combinations(range(len(words)), size)):
                subset = sorted(subset)
                gaps = [subset[k] - subset[k - 1] for k in range(1, len(subset))]
                out.append((" ".join(words[k] for k in subset), gaps))

    return out
Example 9
Project: pyGSTi   Author: pyGSTio   File: qubitgraph.py    License: Apache License 2.0 6 votes vote down vote up
def connected_combos(self, possible_nodes, size):
        """
        Count the connected subsets of `possible_nodes` with `size` nodes.

        Parameters
        ----------
        possible_nodes : list
            A list of node (qubit) labels.

        size : int
            The size of the connected subsets being sought (counted).

        Returns
        -------
        int
        """
        # Tally every size-`size` combination that the graph reports as
        # globally connected.
        return sum(1 for subset in _itertools.combinations(possible_nodes, size)
                   if self.are_glob_connected(subset))
Example 10
Project: pyGSTi   Author: pyGSTio   File: processorspec.py    License: Apache License 2.0 6 votes vote down vote up
def get_all_connected_sets(self, n):
        """
        Return every connected set of `n` qubits.

        Note that for a large device this will often be an unreasonably
        large number of sets of qubits, and so the run-time of this method
        will be unreasonable.

        Parameters
        ----------
        n : int
            The number of qubits within each set.

        Returns
        -------
        list
            All sets of `n` connected qubits.
        """
        # A combination qualifies when the induced subgraph on those
        # qubits is globally connected.
        return [combo for combo in _iter.combinations(self.qubit_labels, n)
                if self.qubitgraph.subgraph(list(combo)).are_glob_connected(combo)]

    #Note:  Below method gets all subgraphs up to full graph size. 
Example 11
Project: pyGSTi   Author: pyGSTio   File: randomcircuit.py    License: Apache License 2.0 6 votes vote down vote up
def find_all_sets_of_compatible_twoQgates(edgelist, n, gatename='Gcnot', aslabel=False):
    """
    Find every way to pick `n` two-qubit gates from `edgelist` such that
    no qubit participates in more than one gate.

    n : int . the number of two-qubit gates to have in the set.

    Returns a list of gate sets, each either as Label objects (aslabel=True)
    or as 'gatename:q0:q1' strings.
    """
    compatible = []
    for chosen_pairs in _itertools.combinations(edgelist, n):

        # Qubits touched by the chosen gates, with multiplicity.
        qubits_used = [q for pair in chosen_pairs for q in pair]

        # Compatible iff no qubit repeats across the chosen pairs.
        if len(set(qubits_used)) == len(qubits_used):
            if aslabel:
                compatible.append([_lbl.Label(gatename, pair) for pair in chosen_pairs])
            else:
                compatible.append([gatename + ':' + pair[0] + ':' + pair[1]
                                   for pair in chosen_pairs])

    return compatible
Example 12
Project: pyGSTi   Author: pyGSTio   File: nqubitconstruction.py    License: Apache License 2.0 6 votes vote down vote up
def connected_combos(self, possible_indices, size):
        # Count the size-`size` subsets of `possible_indices` that form a
        # connected set according to self.are_connected.
        total = 0
        for subset in _itertools.combinations(possible_indices, size):
            if self.are_connected(subset):
                total += 1
        return total

#     def remove(self, node):
#         """ Remove all references to node """
#         for n, cxns in self._graph.iteritems():
#             try:
#                 cxns.remove(node)
#             except KeyError:
#                 pass
#         try:
#             del self._graph[node]
#         except KeyError:
#             pass 
Example 13
Project: qutebrowser   Author: qutebrowser   File: configtypes.py    License: GNU General Public License v3.0 6 votes vote down vote up
def complete(self) -> _Completions:
        """Return completions: each single valid value, then every
        combination (size >= 2) of the combinable values."""
        valid_values = self.valtype.valid_values
        if valid_values is None:
            return None

        # One completion per single valid value, with its description.
        completions = [(json.dumps([value]), valid_values.descriptions.get(value, ""))
                       for value in valid_values]

        combinables = self.combinable_values
        if combinables is None:
            combinables = list(valid_values)
        # All multi-value combinations, without descriptions.
        for size in range(2, len(combinables) + 1):
            completions += [(json.dumps(combo), '')
                            for combo in itertools.combinations(combinables, size)]
        return completions
Example 14
Project: speech_separation   Author: bill9800   File: build_audio_database.py    License: MIT License 6 votes vote down vote up
def generate_dataset(sample_range, repo_path, num_speaker=2):
    '''
    A function to generate dataset
    :param sample_range: range of the sample to create the dataset
    :param repo_path: audio repository
    :param num_speaker: number of speaker to separate
    :return: X_data, y_data
    '''
    audio_paths = generate_path_list(sample_range, repo_path)

    num_data = 0
    # Each combination of `num_speaker` distinct clips yields one mixed sample.
    for speaker_combo in itertools.combinations(audio_paths, num_speaker):
        generate_mix_sample(speaker_combo, num_speaker)
        num_data += 1

    print('number of the data generated: ', num_data)
Example 15
Project: gftools   Author: googlefonts   File: fonts-subset-support.py    License: Apache License 2.0 6 votes vote down vote up
def _LeastSimilarCoverage(files, subset):
  """Returns pair of fonts having inconsistent coverage for a subset.

  Args:
    files: List of font files
    subset: Name of subset
  Returns:
    3 tuple of (file1, file2, number of codepoints difference)
  """
  subsetcps = fonts.CodepointsInSubset(subset, True)
  worst = (None, None, 0)
  # Track the pair with the largest codepoint inconsistency seen so far.
  for file1, file2 in itertools.combinations(files, 2):
    inconsistency = _InconsistentSubsetSupport(file1, file2, subsetcps)
    if inconsistency > worst[2]:
      worst = (file1, file2, inconsistency)
  return worst
Example 16
Project: whynot   Author: zykls   File: causal_search.py    License: MIT License 6 votes vote down vote up
def _find_skeleton(self, data, variable_types):
        """
        For each pair of nodes, run a conditional independence test over
        larger and larger conditioning sets to try to find a set that
        d-separates the pair.  If such a set exists, cut the edge between
        the nodes.  If not, keep the edge.
        """
        # Maps a removed edge (x, y) to the conditioning set that separated it.
        self.separating_sets = {}
        # A falsy max_k means "no limit set": allow conditioning sets up to
        # the full node count.
        if not self.max_k:
            self.max_k = len(self._g.nodes) + 1
        # N is the conditioning-set size, grown from 0 upward.
        for N in range(self.max_k + 1):
            # Snapshot the edges: edges may be removed inside this loop.
            for (x, y) in list(self._g.edges()):
                x_neighbors = list(self._g.neighbors(x))
                y_neighbors = list(self._g.neighbors(y))
                # Candidate conditioning variables: neighbors of either
                # endpoint, excluding the endpoints themselves.
                z_candidates = list(set(x_neighbors + y_neighbors) - set([x, y]))
                for z in itertools.combinations(z_candidates, N):
                    test = self.independence_test([y], [x], list(z), data, self.alpha)
                    if test.independent():
                        self._g.remove_edge(x, y)
                        self.separating_sets[(x, y)] = z
                        # This pair is separated; stop trying larger sets.
                        break
Example 17
Project: deep_architect   Author: negrinho   File: nasbench_space.py    License: MIT License 6 votes vote down vote up
def create_cell_generator(num_nodes):
    """Build a cell generator over `num_nodes` intermediate nodes.

    Creates the searchable hyperparameters once (edge on/off booleans and
    per-node op choices) and returns a closure that instantiates a cell
    for a given filter count.
    """
    # One boolean hyperparameter per possible connection between the
    # num_nodes + 2 cell positions (inputs/outputs included).
    h_connections = [
        Bool(name='in_%d_%d' % (in_id, out_id))
        for (in_id, out_id) in itertools.combinations(range(num_nodes + 2), 2)
    ]

    # One categorical op choice per intermediate node.
    cell_ops = [
        D(['conv1', 'conv3', 'max3'], name='node_%d' % i)
        for i in range(num_nodes)
    ]

    def generate(filters):
        # NOTE(review): the literal 5 looks like the node count the cell
        # expects -- confirm against `cell`'s signature.
        return cell(
            lambda channels: mo.siso_sequential(
                [conv2d(D([channels]), D([1])),
                 batch_normalization(),
                 relu()]), lambda num_inputs, node_id, channels:
            intermediate_node_fn(num_inputs, node_id, channels, cell_ops),
            concat, h_connections, 5, filters)

    return generate
Example 18
Project: recruit   Author: Frank-qlu   File: test_numeric.py    License: Apache License 2.0 6 votes vote down vote up
def test_count_nonzero_axis_consistent(self):
        # Check that the axis behaviour for valid axes in
        # non-special cases is consistent (and therefore
        # correct) by checking it against an integer array
        # that is then casted to the generic object dtype
        from itertools import combinations, permutations

        axis = (0, 1, 2, 3)
        size = (5, 5, 5, 5)
        msg = "Mismatch for axis: %s"

        rng = np.random.RandomState(1234)
        m = rng.randint(-100, 100, size=size)
        n = m.astype(object)

        for length in range(len(axis)):
            for combo in combinations(axis, length):
                for perm in permutations(combo):
                    assert_equal(
                        np.count_nonzero(m, axis=perm),
                        np.count_nonzero(n, axis=perm),
                        err_msg=msg % (perm,)) 
Example 19
Project: twisst   Author: simonhmartin   File: twisst.py    License: GNU General Public License v3.0 6 votes vote down vote up
def makeTopoDict(taxonNames, topos=None, outgroup = None):
    """Precompute topology and taxon-pair bookkeeping.

    Returns a dict holding the candidate topologies, all taxon pairs (by
    name and by index), the disjoint pairs-of-pairs, and, per topology,
    the result of checkDisjointChains over those pairs-of-pairs.
    """
    output = {}
    # Enumerate all topologies unless a fixed list was supplied.
    output["topos"] = allTopos(taxonNames, []) if topos is None else topos
    if outgroup:
        for topo in output["topos"]: topo.set_outgroup(outgroup)
    output["n"] = len(output["topos"])
    # All unordered taxon pairs, by name and by numeric index.
    pairs = list(itertools.combinations(taxonNames,2))
    pairsNumeric = list(itertools.combinations(range(len(taxonNames)),2))
    # Keep only pairs-of-pairs that share no taxon.
    output["pairsOfPairs"] = [y for y in itertools.combinations(pairs,2) if pairsDisjoint(y[0],y[1])]
    output["pairsOfPairsNumeric"] = [y for y in itertools.combinations(pairsNumeric,2) if pairsDisjoint(y[0],y[1])]
    output["chainsDisjoint"] = []
    for tree in output["topos"]:
        # For each topology, build the leaf-to-leaf chains and record which
        # pairs-of-pairs have disjoint chains.
        rootLeafChains = makeRootLeafChainDict(tree)
        leafLeafChains = makeLeafLeafChainDict(rootLeafChains, pairs)
        for pair in pairs: leafLeafChains[pair[0]][pair[1]].setSet()
        output["chainsDisjoint"].append(checkDisjointChains(leafLeafChains, output["pairsOfPairs"]))
    return output
Example 20
Project: python-shamir-mnemonic   Author: trezor   File: test_shamir.py    License: MIT License 5 votes vote down vote up
def test_group_sharing():
    """2-of-4 group sharing: any two groups, each meeting its member
    threshold, must recover MS; incomplete inputs must raise."""
    group_threshold = 2
    group_sizes = (5, 3, 5, 1)
    member_thresholds = (3, 2, 2, 1)
    mnemonics = shamir.generate_mnemonics(
        group_threshold, list(zip(member_thresholds, group_sizes)), MS
    )

    # Test all valid combinations of mnemonics.
    for groups in combinations(zip(mnemonics, member_thresholds), group_threshold):
        for group1_subset in combinations(groups[0][0], groups[0][1]):
            for group2_subset in combinations(groups[1][0], groups[1][1]):
                mnemonic_subset = list(group1_subset + group2_subset)
                # Recovery must not depend on share ordering.
                shuffle(mnemonic_subset)
                assert MS == shamir.combine_mnemonics(mnemonic_subset)

    # Minimal sets of mnemonics.
    assert MS == shamir.combine_mnemonics(
        [mnemonics[2][0], mnemonics[2][2], mnemonics[3][0]]
    )
    assert MS == shamir.combine_mnemonics(
        [mnemonics[2][3], mnemonics[3][0], mnemonics[2][4]]
    )

    # One complete group and one incomplete group out of two groups required.
    with pytest.raises(MnemonicError):
        shamir.combine_mnemonics(mnemonics[0][2:] + [mnemonics[1][0]])

    # One group of two required.
    with pytest.raises(MnemonicError):
        shamir.combine_mnemonics(mnemonics[0][1:4])
Example 21
Project: python-shamir-mnemonic   Author: trezor   File: test_shamir.py    License: MIT License 5 votes vote down vote up
def test_group_sharing_threshold_1():
    """With group_threshold == 1, any single group meeting its member
    threshold must recover the master secret."""
    group_threshold = 1
    group_sizes = (5, 3, 5, 1)
    member_thresholds = (3, 2, 2, 1)
    mnemonics = shamir.generate_mnemonics(
        group_threshold, list(zip(member_thresholds, group_sizes)), MS
    )

    # Every sufficiently large subset of any one group combines to MS,
    # regardless of share ordering.
    for group, threshold in zip(mnemonics, member_thresholds):
        for subset in combinations(group, threshold):
            candidates = list(subset)
            shuffle(candidates)
            assert MS == shamir.combine_mnemonics(candidates)
Example 22
Project: icme2019   Author: ShenDezhou   File: interaction.py    License: MIT License 5 votes vote down vote up
def call(self, inputs, **kwargs):
        """Attentional factorization-machine pooling over a list of
        3-D embedding tensors.

        Computes elementwise products of every pair of embeddings, scores
        each interaction with a small attention network, and returns the
        attention-weighted sum projected by `projection_p`.
        """

        if K.ndim(inputs[0]) != 3:
            # Bug fix: the original formatted K.ndim(inputs) -- `inputs` is
            # a *list* of tensors, so computing its ndim raised inside the
            # error path instead of reporting the offending dimension.
            raise ValueError(
                "Unexpected inputs dimensions %d, expect to be 3 dimensions" % (K.ndim(inputs[0])))

        embeds_vec_list = inputs
        row = []
        col = []

        # Every unordered pair of embeddings contributes one interaction.
        for r, c in itertools.combinations(embeds_vec_list, 2):
            row.append(r)
            col.append(c)

        p = tf.concat(row, axis=1)
        q = tf.concat(col, axis=1)
        inner_product = p * q

        bi_interaction = inner_product
        # Attention MLP: relu(W x + b), then softmax over interactions.
        attention_temp = tf.nn.relu(tf.nn.bias_add(tf.tensordot(
            bi_interaction, self.attention_W, axes=(-1, 0)), self.attention_b))
        #  Dense(self.attention_factor,'relu',kernel_regularizer=l2(self.l2_reg_w))(bi_interaction)
        self.normalized_att_score = tf.nn.softmax(tf.tensordot(
            attention_temp, self.projection_h, axes=(-1, 0)), dim=1)
        attention_output = tf.reduce_sum(
            self.normalized_att_score*bi_interaction, axis=1)

        attention_output = tf.nn.dropout(
            attention_output, self.keep_prob, seed=1024)
        # Dropout(1-self.keep_prob)(attention_output)
        afm_out = tf.tensordot(
            attention_output, self.projection_p, axes=(-1, 0))

        return afm_out
Example 23
Project: fuku-ml   Author: fukuball   File: PLA.py    License: MIT License 5 votes vote down vote up
def init_W(self, mode='normal'):
        """Initialize per-class-pair weight vectors (one-vs-one).

        With mode='linear_regression_accelerator', each pair's weights are
        seeded from a linear-regression solution instead of zeros.
        Returns self.W, a dict mapping class pairs to weight vectors.
        """

        self.W = {}

        # Training data must be loaded before weights can be initialized.
        if (self.status != 'load_train_data') and (self.status != 'train'):
            print("Please load train data first.")
            return self.W

        self.status = 'init'

        self.data_num = len(self.train_Y)
        self.data_demension = len(self.train_X[0])
        # One binary problem per unordered pair of distinct labels.
        self.class_list = list(itertools.combinations(np.unique(self.train_Y), 2))

        for class_item in self.class_list:
            self.W[class_item] = np.zeros(self.data_demension)

        if mode == 'linear_regression_accelerator':
            accelerator = linear_regression.Accelerator()
            for class_item in self.class_list:
                # Restrict the dataset to the two classes of this pair.
                modify_X, modify_Y = utility.DatasetLoader.modify_XY(self.train_X, self.train_Y, class_item)
                # Temporarily swap the instance's dataset and weights so the
                # accelerator (which reads self.*) sees the restricted
                # two-class problem; all state is restored below. The
                # statement order here is load-bearing.
                self.temp_train_X = self.train_X
                self.temp_train_Y = self.train_Y
                self.train_X = modify_X
                self.train_Y = modify_Y
                self.temp_data_num = self.data_num
                self.data_num = len(self.train_Y)
                self.temp_W = self.W
                self.W = self.temp_W[class_item]
                self.temp_W[class_item] = accelerator.init_W(self)
                self.train_X = self.temp_train_X
                self.train_Y = self.temp_train_Y
                self.temp_train_X = []
                self.temp_train_Y = []
                self.data_num = self.temp_data_num
                self.temp_data_num = 0
                self.W = self.temp_W
                self.temp_W = {}

        return self.W
Example 24
Project: fuku-ml   Author: fukuball   File: PocketPLA.py    License: MIT License 5 votes vote down vote up
def init_W(self, mode='normal'):
        """Initialize per-class-pair weight vectors (one-vs-one).

        With mode='linear_regression_accelerator', each pair's weights are
        seeded from a linear-regression solution instead of zeros.
        Returns self.W, a dict mapping class pairs to weight vectors.
        """

        self.W = {}

        # Training data must be loaded before weights can be initialized.
        if (self.status != 'load_train_data') and (self.status != 'train'):
            print("Please load train data first.")
            return self.W

        self.status = 'init'

        self.data_num = len(self.train_Y)
        self.data_demension = len(self.train_X[0])
        # One binary problem per unordered pair of distinct labels.
        self.class_list = list(itertools.combinations(np.unique(self.train_Y), 2))

        for class_item in self.class_list:
            self.W[class_item] = np.zeros(self.data_demension)

        if mode == 'linear_regression_accelerator':
            accelerator = linear_regression.Accelerator()
            for class_item in self.class_list:
                # Restrict the dataset to the two classes of this pair.
                modify_X, modify_Y = utility.DatasetLoader.modify_XY(self.train_X, self.train_Y, class_item)
                # Temporarily swap the instance's dataset and weights so the
                # accelerator (which reads self.*) sees the restricted
                # two-class problem; all state is restored below. The
                # statement order here is load-bearing.
                self.temp_train_X = self.train_X
                self.temp_train_Y = self.train_Y
                self.train_X = modify_X
                self.train_Y = modify_Y
                self.temp_data_num = self.data_num
                self.data_num = len(self.train_Y)
                self.temp_W = self.W
                self.W = self.temp_W[class_item]
                self.temp_W[class_item] = accelerator.init_W(self)
                self.train_X = self.temp_train_X
                self.train_Y = self.temp_train_Y
                self.temp_train_X = []
                self.temp_train_Y = []
                self.data_num = self.temp_data_num
                self.temp_data_num = 0
                self.W = self.temp_W
                self.temp_W = {}

        return self.W
Example 25
Project: OpenFermion-Cirq   Author: quantumlib   File: fermionic_simulation.py    License: Apache License 2.0 5 votes vote down vote up
def fermionic_simulation_gates_from_interaction_operator(
        operator: openfermion.InteractionOperator):
    r"""
    Given $H = \sum_{I \subset [n]} H_I$, returns gates
    $\left\{G_I\right\} = \left\{e^{i H_I}\right\}$.

    Each term $H_I$ is the sum of all terms in $H$ that involve exactly the
    orbitals $I$.

    Args:
        operator: The interaction operator ($H$).

    Returns: A dict from tuples of mode indices to gates.
    """
    n_qubits = operator.n_qubits

    gates: Dict[Tuple[int, ...], cirq.Gate] = {}

    # NOTE(review): the constant term is stored as a bare number under the
    # empty key, not as a gate -- callers must special-case it.
    if operator.constant:
        gates[()] = operator.constant
    # Single-mode terms: diagonal one-body coefficients become Z powers.
    for p in range(n_qubits):
        coeff = operator.one_body_tensor[p, p]
        if coeff:
            gates[(p,)] = cirq.Z**(coeff / np.pi)
    # Two-, three- and four-mode terms: delegate to the corresponding
    # fermionic-simulation gate constructors; falsy results (zero
    # interaction) are skipped.
    for modes in itertools.combinations(range(n_qubits), 2):
        gate: Optional[InteractionOperatorFermionicGate] = (
            QuadraticFermionicSimulationGate.from_interaction_operator(
                operator=operator, modes=modes))
        if gate:
            gates[modes] = gate
    for modes in itertools.combinations(range(n_qubits), 3):
        gate = CubicFermionicSimulationGate.from_interaction_operator(
            operator=operator, modes=modes)
        if gate:
            gates[modes] = gate
    for modes in itertools.combinations(range(n_qubits), 4):
        gate = QuarticFermionicSimulationGate.from_interaction_operator(
            operator=operator, modes=modes)
        if gate:
            gates[modes] = gate
    return gates
Example 26
Project: OpenFermion-Cirq   Author: quantumlib   File: fermionic_simulation_test.py    License: Apache License 2.0 5 votes vote down vote up
def test_weights_and_exponent(weights):
    """Gates built from scaled weights with compensating exponents (and
    absorb_exponent=True) must all be equivalent, and re-exponentiation
    must set `exponent` as requested."""
    exponents = np.linspace(-1, 1, 8)
    gates = tuple(
        ofc.QuarticFermionicSimulationGate(
            weights / exponent, exponent=exponent, absorb_exponent=True)
        for exponent in exponents)

    # After absorbing, every pair of gates should be (approximately) equal.
    for g1, g2 in itertools.combinations(gates, 2):
        assert cirq.approx_eq(g1, g2, atol=1e-100)

    for i, (gate, exponent) in enumerate(zip(gates, exponents)):
        assert gate.exponent == 1
        # NOTE(review): exponents[-i] equals exponents[0] when i == 0
        # (since -0 == 0) -- presumably intended, but worth confirming.
        new_exponent = exponents[-i]
        new_gate = gate._with_exponent(new_exponent)
        assert new_gate.exponent == new_exponent
Example 27
Project: OpenFermion-Cirq   Author: quantumlib   File: low_rank.py    License: Apache License 2.0 5 votes vote down vote up
def params(self) -> Iterable[sympy.Symbol]:
        """The parameters of the ansatz.

        Yields one symbol per nonzero coefficient (or unconditionally when
        the corresponding include_all_* flag is set), per iteration.
        """

        for i in range(self.iterations):

            for p in range(len(self.qubits)):
                # One-body energies
                if (self.include_all_z or not numpy.isclose(
                        self.one_body_energies[p], 0)):
                    yield LetterWithSubscripts('U', p, i)
                # Diagonal two-body coefficients for each singular vector
                # (reuses 'U' with an extra subscript j; the subscript
                # arity distinguishes the two parameter families).
                for j in range(len(self.eigenvalues)):
                    two_body_coefficients = (
                            self.scaled_density_density_matrices[j])
                    if (self.include_all_z or not numpy.isclose(
                            two_body_coefficients[p, p], 0)):
                        yield LetterWithSubscripts('U', p, j, i)

            for p, q in itertools.combinations(range(len(self.qubits)), 2):
                # Off-diagonal two-body coefficients for each singular vector
                for j in range(len(self.eigenvalues)):
                    two_body_coefficients = (
                            self.scaled_density_density_matrices[j])
                    if (self.include_all_cz or not numpy.isclose(
                            two_body_coefficients[p, q], 0)):
                        yield LetterWithSubscripts('V', p, q, j, i)
Example 28
Project: OpenFermion-Cirq   Author: quantumlib   File: split_operator_trotter.py    License: Apache License 2.0 5 votes vote down vote up
def params(self) -> Iterable[sympy.Symbol]:
        """The names of the parameters of the ansatz."""
        n_qubits = len(self.qubits)
        for i in range(self.iterations):
            # One 'U' symbol per qubit with a nonzero orbital energy
            # (or unconditionally when include_all_z is set).
            for p in range(n_qubits):
                if self.include_all_z or not numpy.isclose(
                        self.orbital_energies[p], 0):
                    yield LetterWithSubscripts('U', p, i)
            # One 'V' symbol per qubit pair with a nonzero two-body term.
            for p, q in itertools.combinations(range(n_qubits), 2):
                if self.include_all_cz or not numpy.isclose(
                        self.hamiltonian.two_body[p, q], 0):
                    yield LetterWithSubscripts('V', p, q, i)
Example 29
Project: grimoirelab-sortinghat   Author: chaoss   File: grimoirelab.py    License: GNU General Public License v3.0 5 votes vote down vote up
def __validate_enrollment_periods(self, enrollments):
        """Check for overlapped periods in the enrollments"""

        for first, second in itertools.combinations(enrollments, 2):

            # Two periods overlap exactly when the later start precedes
            # the earlier end.
            latest_start = max(first.start, second.start)
            earliest_end = min(first.end, second.end)

            if latest_start < earliest_end:
                msg = "invalid GrimoireLab enrollment dates. " \
                      "Organization dates overlap."
                raise InvalidFormatError(cause=msg)

        return enrollments
Example 30
Project: pymoo   Author: msu-coinlab   File: misc.py    License: Apache License 2.0 5 votes vote down vote up
def powerset(iterable):
    """Yield every subset of *iterable* as a tuple, from the empty tuple
    up to the full set, in order of increasing size.

    Generalization: accepts any iterable, including one-shot iterators --
    the input is materialized once so its length is known (the original
    called len() directly, which fails on generators).
    """
    items = list(iterable)
    for size in range(len(items) + 1):
        yield from combinations(items, size)