Python sklearn.metrics.pairwise.rbf_kernel() Examples

The following are code examples for showing how to use sklearn.metrics.pairwise.rbf_kernel(). They are from open source Python projects. You can vote up the examples you like or vote down the ones you don't like.

Example 1
Project: MKLpy   Author: IvanoLauriola   File: komd.py    GNU General Public License v3.0 6 votes vote down vote up
def __kernel_definition__(self):
        """Select the kernel function.

        Returns
        -------
        kernel : a callable relative to the selected kernel
            Takes (X, Y) and returns the Gram / cross-kernel matrix.
        """
        # A user-supplied callable takes precedence over named kernels.
        if callable(self.kernel):
            return self.kernel
        # `is None` instead of `== None` (PEP 8 identity comparison).
        if self.kernel == 'rbf' or self.kernel is None:
            return lambda X, Y: rbf_kernel(X, Y, self.rbf_gamma)
        if self.kernel == 'poly':
            return lambda X, Y: polynomial_kernel(X, Y, degree=self.degree, gamma=self.rbf_gamma, coef0=self.coef0)
        if self.kernel == 'linear':
            return lambda X, Y: linear_kernel(X, Y)
        if self.kernel == 'precomputed':
            # Caller supplies the Gram matrix directly; Y is ignored.
            return lambda X, Y: X
Example 2
Project: TextCategorization   Author: Y-oHr-N   File: multiclass.py    MIT License 6 votes vote down vote up
def _build_graph(self):
        """Compute the graph Laplacian of the sparsified, re-weighted graph.

        Sparsification (self.sparsify):
          - 'epsilonNN': keep neighbors within self.radius;
          - 'kNN': symmetrized k-nearest-neighbor graph (union, Q + Q.T);
          - 'MkNN': mutual k-nearest neighbors (intersection, Q * Q.T).
        Re-weighting (self.reweight == 'rbf'): multiply edge weights by an
        RBF kernel with gamma = self.t.

        Returns the (optionally normalized) graph Laplacian of self.A_.
        """

        # Graph sparsification
        if self.sparsify == 'epsilonNN':
            self.A_           = radius_neighbors_graph(self.X_, self.radius, include_self=False)
        else:
            Q                 = kneighbors_graph(
                self.X_,
                self.n_neighbors,
                include_self  = False
            ).astype(bool)  # np.bool was removed in NumPy 1.24; use builtin bool

            if self.sparsify   == 'kNN':
                self.A_       = (Q + Q.T).astype(np.float64)
            elif self.sparsify == 'MkNN':
                self.A_       = (Q.multiply(Q.T)).astype(np.float64)

        # Edge re-weighting
        if self.reweight == 'rbf':
            W                 = rbf_kernel(self.X_, gamma=self.t)
            self.A_           = self.A_.multiply(W)

        return sp.csgraph.laplacian(self.A_, normed=self.normed)
Example 3
Project: Scuba   Author: gzampieri   File: compute_kernel.py    GNU General Public License v2.0 6 votes vote down vote up
def get_RBF(A, s=1.):
    """Compute a radial basis function kernel on standardized features.

    Parameters:
        A -- Feature matrix.
        s -- Scale parameter (positive float, 1.0 by default).

    Return:
        K -- Radial basis function kernel matrix.

    NOTE(review): the third positional argument of rbf_kernel is gamma, so
    gamma is set to (median squared distance) * s, not its reciprocal —
    confirm this matches the intended bandwidth heuristic.
    """

    from sklearn.metrics.pairwise import euclidean_distances, rbf_kernel
    from sklearn.preprocessing import scale

    A = scale(A)
    # Median of the strictly-lower-triangle pairwise squared distances.
    sq_dists = euclidean_distances(A, A, None, squared=True)
    lower_tri = sq_dists[np.nonzero(np.tril(sq_dists))]
    bandwidth = np.median(lower_tri)

    return rbf_kernel(A, None, bandwidth * s)
Example 4
Project: numpy-ml   Author: ddbourgin   File: tests.py    GNU General Public License v3.0 6 votes vote down vote up
def test_radial_basis_kernel(n_trials=100):
    """Compare RBFKernel against sklearn's rbf_kernel on random inputs.

    Runs ``n_trials`` randomized trials. The original looped forever
    (``while True``), which hangs any automated test runner; a bounded
    loop keeps the same coverage while terminating.
    """
    for _ in range(n_trials):
        N = np.random.randint(1, 100)
        M = np.random.randint(1, 100)
        C = np.random.randint(1, 1000)
        gamma = np.random.rand()

        X = np.random.rand(N, C)
        Y = np.random.rand(M, C)

        # sklearn (gamma) <-> mine (sigma) conversion:
        # gamma = 1 / (2 * sigma^2)  =>  sigma = sqrt(1 / (2 * gamma))
        # (the original comment's `sqrt(1 / 2 * gamma)` was missing parens)
        mine = RBFKernel(sigma=np.sqrt(1 / (2 * gamma)))(X, Y)
        gold = sk_rbf(X, Y, gamma=gamma)

        np.testing.assert_almost_equal(mine, gold)
        print("PASSED")


#######################################################################
#                          Distance Metrics                           #
####################################################################### 
Example 5
Project: multiview   Author: NeuroDataDesign   File: test_spectral.py    Apache License 2.0 6 votes vote down vote up
def test_gaussian_sim(data):
    """_gaussian_sim must equal an RBF kernel with median-distance gamma."""
    view = data['fit_data'][0]
    pairwise = cdist(view, view)
    gamma = 1 / (2 * np.median(pairwise) ** 2)
    expected = rbf_kernel(view, view, gamma)
    computed = data['spectral']._gaussian_sim(view)

    n_fit = data['n_fit']
    assert(computed.shape[0] == n_fit)
    assert(computed.shape[1] == n_fit)

    # Element-wise agreement to 1e-6.
    for row in range(computed.shape[0]):
        for col in range(computed.shape[1]):
            assert np.abs(expected[row][col] - computed[row][col]) < 0.000001
Example 6
Project: multiview   Author: NeuroDataDesign   File: test_spectral.py    Apache License 2.0 6 votes vote down vote up
def test_compute_eigs(data):
    """_compute_eigs returns an (n_fit, n_clusters) matrix of unit-norm columns."""
    view = data['fit_data'][0]
    pairwise = cdist(view, view)
    gamma = 1 / (2 * np.median(pairwise) ** 2)
    kernel = rbf_kernel(view, view, gamma)

    eigs = data['spectral']._compute_eigs(kernel)

    assert(eigs.shape[0] == data['n_fit'])
    assert(eigs.shape[1] == data['n_clusters'])

    # Every eigenvector column should be normalized to unit length.
    for magnitude in np.linalg.norm(eigs, axis=0):
        assert(np.abs(magnitude - 1) < 0.000001)
Example 7
Project: linear_neuron   Author: uglyboxer   File: test_svm.py    MIT License 6 votes vote down vote up
def test_svr_decision_function():
    """SVR.decision_function must match the manual libsvm formula."""
    X = iris.data
    y = iris.target

    # Linear kernel: decision = X @ coef.T + intercept
    model = svm.SVR(kernel='linear', C=0.1).fit(X, y)
    expected = np.dot(X, model.coef_.T) + model.intercept_
    assert_array_almost_equal(expected.ravel(),
                              model.decision_function(X).ravel())

    # RBF kernel: decision = K(X, SV) @ dual_coef.T + intercept
    model = svm.SVR(kernel='rbf', gamma=1).fit(X, y)
    gram = rbf_kernel(X, model.support_vectors_, gamma=model.gamma)
    expected = np.dot(gram, model.dual_coef_.T) + model.intercept_
    assert_array_almost_equal(expected.ravel(),
                              model.decision_function(X).ravel())
Example 8
Project: linear_neuron   Author: uglyboxer   File: test_kernel_approximation.py    MIT License 6 votes vote down vote up
def test_rbf_sampler():
    """RBFSampler's random features must approximate the exact RBF kernel."""
    gamma = 10.
    exact = rbf_kernel(X, Y, gamma=gamma)

    # Approximate feature map, then reconstruct the kernel by inner products.
    sampler = RBFSampler(gamma=gamma, n_components=1000, random_state=42)
    feats_x = sampler.fit_transform(X)
    feats_y = sampler.transform(Y)
    approx = np.dot(feats_x, feats_y.T)

    err = exact - approx
    assert_less_equal(np.abs(np.mean(err)), 0.01)  # close to unbiased
    np.abs(err, out=err)
    assert_less_equal(np.max(err), 0.1)  # nothing too far off
    assert_less_equal(np.mean(err), 0.05)  # mean is fairly close
Example 9
Project: RGAN   Author: ratschlab   File: data_utils.py    MIT License 6 votes vote down vote up
def GP(seq_length=30, num_samples=28*5*100, num_signals=1, scale=0.1, kernel='rbf', **kwargs):
    """Draw samples from a zero-mean Gaussian process.

    Parameters: seq_length (length of each path), num_samples (number of
    paths), num_signals (independent signals per sample), scale (gamma of
    the RBF kernel / parameter of the periodic kernel), kernel ('rbf' or
    'periodic').

    Returns
    -------
    samples : ndarray, shape (num_samples, seq_length, num_signals)
    pdf : callable, log-density of the generating distribution

    Raises NotImplementedError for an unknown kernel name.
    """
    samples = np.empty(shape=(num_samples, seq_length, num_signals))
    #T = np.arange(seq_length)/seq_length    # note, between 0 and 1
    T = np.arange(seq_length)    # note, not between 0 and 1
    if kernel == 'periodic':
        cov = periodic_kernel(T)
    elif kernel == 'rbf':
        cov = rbf_kernel(T.reshape(-1, 1), gamma=scale)
    else:
        raise NotImplementedError
    # scale the covariance
    cov *= 0.2
    # define the distribution (zero mean); removed an unused `mu` local and
    # a leftover debug print of np.linalg.det(cov)
    distribution = multivariate_normal(mean=np.zeros(cov.shape[0]), cov=cov)
    pdf = distribution.logpdf
    # now generate samples
    for i in range(num_signals):
        samples[:, :, i] = distribution.rvs(size=num_samples)
    return samples, pdf
Example 10
Project: Weiss   Author: WangWenjun559   File: test_spectral_embedding.py    Apache License 2.0 6 votes vote down vote up
def test_spectral_embedding_callable_affinity(seed=36):
    """A callable affinity must behave like the builtin 'rbf' affinity."""
    gamma = 0.9
    expected_kernel = rbf_kernel(S, gamma=gamma)

    se_from_callable = SpectralEmbedding(
        n_components=2,
        affinity=lambda x: rbf_kernel(x, gamma=gamma),
        gamma=gamma,
        random_state=np.random.RandomState(seed))
    se_from_name = SpectralEmbedding(
        n_components=2,
        affinity="rbf",
        gamma=gamma,
        random_state=np.random.RandomState(seed))

    embed_name = se_from_name.fit_transform(S)
    embed_callable = se_from_callable.fit_transform(S)

    # Both estimators must build the same affinity matrix...
    assert_array_almost_equal(
        se_from_callable.affinity_matrix_, se_from_name.affinity_matrix_)
    assert_array_almost_equal(expected_kernel, se_from_name.affinity_matrix_)
    # ...and their embeddings must agree up to per-column sign flips.
    assert_true(
        _check_with_col_sign_flipping(embed_name, embed_callable, 0.05))
Example 11
Project: Weiss   Author: WangWenjun559   File: test_svm.py    Apache License 2.0 6 votes vote down vote up
def test_svr_decision_function():
    """Sanity check: decision_function equals the manual libsvm computation."""
    features = iris.data
    target = iris.target

    # Linear kernel: w @ x + b reproduced from coef_ and intercept_.
    regressor = svm.SVR(kernel='linear', C=0.1).fit(features, target)
    manual = np.dot(features, regressor.coef_.T) + regressor.intercept_
    assert_array_almost_equal(manual.ravel(),
                              regressor.decision_function(features).ravel())

    # RBF kernel: expand via the support vectors and dual coefficients.
    regressor = svm.SVR(kernel='rbf', gamma=1).fit(features, target)
    gram = rbf_kernel(features, regressor.support_vectors_,
                      gamma=regressor.gamma)
    manual = np.dot(gram, regressor.dual_coef_.T) + regressor.intercept_
    assert_array_almost_equal(manual.ravel(),
                              regressor.decision_function(features).ravel())
Example 12
Project: scanorama   Author: brianhie   File: scanorama.py    MIT License 5 votes vote down vote up
def batch_bias(curr_ds, match_ds, bias, batch_size=None, sigma=SIGMA):
    """Average the bias vectors, weighted by RBF similarity to match_ds.

    With batch_size=None the weight matrix is built in one shot and
    row-normalized; otherwise match_ds is processed in batches and the
    normalization denominator is accumulated incrementally.
    """
    if batch_size is None:
        weights = rbf_kernel(curr_ds, match_ds, gamma=0.5*sigma)
        weights = normalize(weights, axis=1, norm='l1')
        return np.dot(weights, bias)

    start = 0
    weighted_sum = np.zeros(curr_ds.shape)
    weight_totals = np.zeros(curr_ds.shape[0])
    n_match = match_ds.shape[0]
    while start < n_match:
        batch_idx = range(start, min(start + batch_size, n_match))
        weights = rbf_kernel(curr_ds, match_ds[batch_idx, :],
                             gamma=0.5*sigma)
        weighted_sum += np.dot(weights, bias[batch_idx, :])
        weight_totals += np.sum(weights, axis=1)
        start += batch_size

    # Guard against all-zero weight rows before dividing.
    weight_totals = handle_zeros_in_scale(weight_totals, copy=False)
    return weighted_sum / weight_totals[:, np.newaxis]

# Compute nonlinear translation vectors between dataset
# and a reference. 
Example 13
Project: TextCategorization   Author: Y-oHr-N   File: laplacian_rls.py    MIT License 5 votes vote down vote up
def fit(self, X, y, L):
        """Fit the model according to the given training data.

        Parameters
        ----------
        X : array-like, shape = [n_samples, n_features]
            Training data.

        y : array-like, shape = [n_samples]
            Target values (unlabeled points are marked as 0).

        L : array-like, shape = [n_samples, n_samples]
            Graph Laplacian.

        Returns
        -------
        self : object
            The fitted estimator, with `dual_coef_` set.
        """

        labeled               = y != 0
        y_labeled             = y[labeled]
        n_samples, n_features = X.shape
        n_labeled_samples     = y_labeled.size
        I                     = sp.eye(n_samples)
        # Diagonal 0/1 mask restricting the data-fit term to labeled samples.
        J                     = sp.diags(labeled.astype(np.float64))
        K                     = rbf_kernel(X, gamma=self.gamma_k)
        # Regularized system: data term + ambient regularization (gamma_a)
        # + intrinsic regularization via the Laplacian power L**p (gamma_i).
        M                     = J @ K \
            + self.gamma_a * n_labeled_samples * I \
            + self.gamma_i * n_labeled_samples / n_samples**2 * L**self.p @ K

        # Train a classifier: solve M @ dual_coef_ = y.
        self.dual_coef_       = LA.solve(M, y)

        return self
Example 14
Project: TextCategorization   Author: Y-oHr-N   File: base.py    MIT License 5 votes vote down vote up
def predict(self, X, Z):
        """Predict target values for samples in X.

        Parameters
        ----------
        X : array-like, shape = [n_samples, n_features]
            Samples.

        Z : array-like, shape = [n_train_samples, n_features]
            Training samples against which `dual_coef_` was fitted.

        Returns
        -------
        y : array-like, shape = [n_samples]
            Predictions for input data: the kernel expansion
            K(X, Z) @ dual_coef_ with gamma = self.gamma_k.
        """

        return rbf_kernel(X, Z, gamma=self.gamma_k) @ self.dual_coef_
Example 15
Project: dockerizeme   Author: dockerizeme   File: snippet.py    Apache License 2.0 5 votes vote down vote up
def transform(self, X, y=None):
        """Map X to its RBF similarities against the stored centroids.

        Returns an array of shape (n_samples, n_centroids). `y` is ignored
        (kept for scikit-learn transformer API compatibility); rbf_kernel's
        default gamma is used since none is passed.
        """
        return rbf_kernel(X, self.centroids)
Example 16
Project: dockerizeme   Author: dockerizeme   File: snippet.py    Apache License 2.0 5 votes vote down vote up
def __call__(self, a, b):
        """Evaluate the RBF kernel between sample sets `a` and `b`,
        passing the instance's `gamma` positionally to rbf_kernel."""
        return rbf_kernel(a, b, self.gamma)
Example 17
Project: BO-DTI   Author: akiyamalab   File: blm.py    GNU General Public License v3.0 5 votes vote down vote up
def kernel_combination(self, R, S, new_inx, bandwidth):
        """Convex blend of similarity matrix S and an RBF kernel on R.

        Rows and columns indexed by `new_inx` are taken from S alone,
        overriding the blended values for those entries.
        """
        combined = self.alpha*S + (1.0 - self.alpha)*rbf_kernel(R, gamma=bandwidth)
        combined[new_inx, :] = S[new_inx, :]
        combined[:, new_inx] = S[:, new_inx]
        return combined
Example 18
Project: BO-DTI   Author: akiyamalab   File: wnngip.py    GNU General Public License v3.0 5 votes vote down vote up
def kernel_combination(self, R, S, new_inx, bandwidth):
        """Combine similarity matrix S with an RBF kernel on profiles R.

        K = alpha * S + (1 - alpha) * rbf(R; gamma=bandwidth); entries at
        `new_inx` fall back entirely to S (no profile data for new items).
        """
        K = self.alpha*S+(1.0-self.alpha)*rbf_kernel(R, gamma=bandwidth)
        K[new_inx, :] = S[new_inx, :]
        K[:, new_inx] = S[:, new_inx]
        return K
Example 19
Project: ParticleFlowBayesRule   Author: xinshi-chen   File: utils.py    MIT License 5 votes vote down vote up
def kernel_function(kernel_type):
        """Return the sklearn pairwise-kernel callable named by `kernel_type`.

        Supported names: 'gaussian', 'laplacian', 'sigmoid', 'polynomial',
        'cosine', 'chi2'. Raises KeyError for anything else.
        """
        return {
            'gaussian': sk_metric.rbf_kernel,
            'laplacian': sk_metric.laplacian_kernel,
            'sigmoid': sk_metric.sigmoid_kernel,
            'polynomial': sk_metric.polynomial_kernel,
            'cosine': sk_metric.cosine_similarity,
            'chi2': sk_metric.chi2_kernel,
        }[kernel_type]
Example 20
Project: ParticleFlowBayesRule   Author: xinshi-chen   File: metric.py    MIT License 5 votes vote down vote up
def square_mmd_fine(p_samples, q_samples, n_p, n_q, kernel_type):
    """Unbiased squared maximum mean discrepancy between p and q samples.

    n_p: number of samples from true distribution p

    assume n_p >> n_q
    """
    kernels = {
        'gaussian': sk_metric.rbf_kernel,
        'laplacian': sk_metric.laplacian_kernel,
        'sigmoid': sk_metric.sigmoid_kernel,
        'polynomial': sk_metric.polynomial_kernel,
        'cosine': sk_metric.cosine_similarity,
    }
    kern = kernels[kernel_type]

    xs = np.array(p_samples)
    ys = np.array(q_samples)

    k_xx = kern(xs, xs)
    k_yy = kern(ys, ys)
    k_xy = kern(xs, ys)

    # Off-diagonal averages give the unbiased within-sample estimates.
    term_xx = (np.sum(k_xx) - np.sum(np.diag(k_xx))) / n_p / (n_p - 1)
    term_yy = (np.sum(k_yy) - np.sum(np.diag(k_yy))) / n_q / (n_q - 1)
    cross_term = np.sum(k_xy) * 2 / n_p / n_q

    return term_xx + term_yy - cross_term
Example 21
Project: RKHS-private-database   Author: matejbalog   File: kernels.py    MIT License 5 votes vote down vote up
def get_kernel_matrix(self, X):
        """Return the RBF Gram matrix of X with itself, using self.gamma."""
        return rbf_kernel(X, gamma=self.gamma)
Example 22
Project: RKHS-private-database   Author: matejbalog   File: kernels.py    MIT License 5 votes vote down vote up
def get_cross_kernel_matrix(self, X, Y):
        """Return the RBF cross-kernel matrix between X and Y, using self.gamma."""
        return rbf_kernel(X, Y, gamma=self.gamma)
Example 23
Project: fsfc   Author: danilkolikov   File: SPEC.py    MIT License 5 votes vote down vote up
def _calc_scores(self, x):
        """Compute SPEC feature scores from a normalized graph Laplacian.

        Builds an RBF-similarity graph over the samples of x, forms the
        symmetric normalized Laplacian D^-1/2 (D - A) D^-1/2, and delegates
        the actual scoring to self._calc_spec_scores.
        """
        similarity = rbf_kernel(x)
        adjacency = similarity
        # Node degrees: row sums of the adjacency matrix.
        degree_vector = np.sum(adjacency, 1)
        degree = np.diag(degree_vector)
        laplacian = degree - adjacency
        # NOTE(review): assumes strictly positive degrees; RBF similarities
        # are positive so this holds for non-empty x — confirm for edge cases.
        normaliser_vector = np.reciprocal(np.sqrt(degree_vector))
        normaliser = np.diag(normaliser_vector)

        normalised_laplacian = normaliser.dot(laplacian).dot(normaliser)

        # Degree-weight the samples, then normalize each feature column.
        weighted_features = np.matmul(normaliser, x)

        normalised_features = weighted_features / np.linalg.norm(weighted_features, axis=0)
        return self._calc_spec_scores(degree, normalised_laplacian, normalised_features, normaliser)
Example 24
Project: linear_neuron   Author: uglyboxer   File: test_pairwise.py    MIT License 5 votes vote down vote up
def callable_rbf_kernel(x, y, **kwds):
    """Callable wrapper around pairwise.rbf_kernel that promotes inputs to 2-D."""
    return rbf_kernel(np.atleast_2d(x), np.atleast_2d(y), **kwds)
Example 25
Project: linear_neuron   Author: uglyboxer   File: test_pairwise.py    MIT License 5 votes vote down vote up
def test_pairwise_kernels_filter_param():
    """filter_params=True must drop kwargs the kernel does not accept."""
    rng = np.random.RandomState(0)
    left = rng.random_sample((5, 4))
    right = rng.random_sample((2, 4))
    expected = rbf_kernel(left, right, gamma=0.1)
    kwargs = {"gamma": 0.1, "blabla": ":)"}
    filtered = pairwise_kernels(left, right, metric="rbf",
                                filter_params=True, **kwargs)
    assert_array_almost_equal(expected, filtered)

    # Without filtering, the unknown keyword must raise TypeError.
    assert_raises(TypeError, pairwise_kernels, left, right, "rbf", **kwargs)
Example 26
Project: linear_neuron   Author: uglyboxer   File: test_pairwise.py    MIT License 5 votes vote down vote up
def test_kernel_symmetry():
    """Every valid kernel must produce a symmetric Gram matrix."""
    rng = np.random.RandomState(0)
    samples = rng.random_sample((5, 4))
    kernels = (linear_kernel, polynomial_kernel, rbf_kernel,
               sigmoid_kernel, cosine_similarity)
    for kern in kernels:
        gram = kern(samples, samples)
        assert_array_almost_equal(gram, gram.T, 15)
Example 27
Project: linear_neuron   Author: uglyboxer   File: test_pairwise.py    MIT License 5 votes vote down vote up
def test_kernel_sparse():
    """Kernels must give identical results on dense and CSR inputs."""
    rng = np.random.RandomState(0)
    dense = rng.random_sample((5, 4))
    sparse = csr_matrix(dense)
    for kern in (linear_kernel, polynomial_kernel, rbf_kernel,
                 sigmoid_kernel, cosine_similarity):
        from_dense = kern(dense, dense)
        from_sparse = kern(sparse, sparse)
        assert_array_almost_equal(from_dense, from_sparse)
Example 28
Project: linear_neuron   Author: uglyboxer   File: test_svm.py    MIT License 5 votes vote down vote up
def test_decision_function():
    """Sanity check that SVC.decision_function matches the manual libsvm
    computation, for multiclass, binary-linear, and binary-RBF cases."""
    # multi class:
    clf = svm.SVC(kernel='linear', C=0.1).fit(iris.data, iris.target)

    dec = np.dot(iris.data, clf.coef_.T) + clf.intercept_

    assert_array_almost_equal(dec, clf.decision_function(iris.data))

    # binary:
    clf.fit(X, Y)
    dec = np.dot(X, clf.coef_.T) + clf.intercept_
    prediction = clf.predict(X)
    assert_array_almost_equal(dec.ravel(), clf.decision_function(X))
    # np.int was removed in NumPy 1.24; the builtin int is equivalent here.
    assert_array_almost_equal(
        prediction,
        clf.classes_[(clf.decision_function(X) > 0).astype(int)])
    expected = np.array([-1., -0.66, -1., 0.66, 1., 1.])
    assert_array_almost_equal(clf.decision_function(X), expected, 2)

    # kernel binary: expand via support vectors and dual coefficients.
    clf = svm.SVC(kernel='rbf', gamma=1)
    clf.fit(X, Y)

    rbfs = rbf_kernel(X, clf.support_vectors_, gamma=clf.gamma)
    dec = np.dot(rbfs, clf.dual_coef_.T) + clf.intercept_
    assert_array_almost_equal(dec.ravel(), clf.decision_function(X))
Example 29
Project: linear_neuron   Author: uglyboxer   File: test_kernel_approximation.py    MIT License 5 votes vote down vote up
def test_nystroem_approximation():
    """Basic sanity checks for the Nystroem feature map."""
    rnd = np.random.RandomState(0)
    data = rnd.uniform(size=(10, 4))

    # With n_components == n_samples the approximation is exact.
    transformed = Nystroem(n_components=data.shape[0]).fit_transform(data)
    exact = rbf_kernel(data)
    assert_array_almost_equal(np.dot(transformed, transformed.T), exact)

    approx = Nystroem(n_components=2, random_state=rnd)
    transformed = approx.fit(data).transform(data)
    assert_equal(transformed.shape, (data.shape[0], 2))

    # A callable kernel is accepted as well.
    dot_kernel = lambda A, B: np.dot(A, B.T)
    approx = Nystroem(n_components=2, kernel=dot_kernel, random_state=rnd)
    transformed = approx.fit(data).transform(data)
    assert_equal(transformed.shape, (data.shape[0], 2))

    # Every named kernel should fit and transform without error.
    for name in kernel_metrics():
        approx = Nystroem(n_components=2, kernel=name, random_state=rnd)
        transformed = approx.fit(data).transform(data)
        assert_equal(transformed.shape, (data.shape[0], 2))
Example 30
Project: linear_neuron   Author: uglyboxer   File: test_kernel_approximation.py    MIT License 5 votes vote down vote up
def test_nystroem_singular_kernel():
    # test that nystroem works with singular kernel matrix
    rng = np.random.RandomState(0)
    X = rng.rand(10, 20)
    X = np.vstack([X] * 2)  # duplicate samples -> rank-deficient Gram matrix

    gamma = 100
    N = Nystroem(gamma=gamma, n_components=X.shape[0]).fit(X)
    X_transformed = N.transform(X)

    K = rbf_kernel(X, gamma=gamma)

    # Reconstructed kernel from the feature map must equal the exact kernel.
    assert_array_almost_equal(K, np.dot(X_transformed, X_transformed.T))
    # NOTE(review): `Y` is not defined in this function (presumably a
    # module-level fixture); this check likely intended `X_transformed` —
    # confirm against the original test module.
    assert_true(np.all(np.isfinite(Y)))
Example 31
Project: linear_neuron   Author: uglyboxer   File: test_kernel_pca.py    MIT License 5 votes vote down vote up
def test_gridsearch_pipeline_precomputed():
    # Test if we can do a grid-search to find parameters to separate
    # circles with a perceptron model using a precomputed kernel.
    data, labels = make_circles(n_samples=400, factor=.3, noise=.05,
                                random_state=0)
    pipeline = Pipeline([
        ("kernel_pca", KernelPCA(kernel="precomputed", n_components=2)),
        ("Perceptron", Perceptron()),
    ])
    search = GridSearchCV(pipeline, cv=3,
                          param_grid=dict(Perceptron__n_iter=np.arange(1, 5)))
    # Feed the precomputed RBF Gram matrix instead of raw features.
    precomputed = rbf_kernel(data, gamma=2.)
    search.fit(precomputed, labels)
    assert_equal(search.best_score_, 1)
Example 32
Project: PyDTI   Author: stephenliu0423   File: blm.py    GNU General Public License v2.0 5 votes vote down vote up
def kernel_combination(self, R, S, new_inx, bandwidth):
        """Blend similarity matrix S with an RBF kernel computed on R.

        K = alpha * S + (1 - alpha) * rbf(R; gamma=bandwidth). Rows and
        columns at `new_inx` are copied back from S, so new entities rely
        on S alone.
        """
        K = self.alpha*S+(1.0-self.alpha)*rbf_kernel(R, gamma=bandwidth)
        K[new_inx, :] = S[new_inx, :]
        K[:, new_inx] = S[:, new_inx]
        return K
Example 33
Project: PyDTI   Author: stephenliu0423   File: wnngip.py    GNU General Public License v2.0 5 votes vote down vote up
def kernel_combination(self, R, S, new_inx, bandwidth):
        """Convex combination of S and an RBF kernel on R (weight self.alpha).

        Entries whose row or column index is in `new_inx` are overwritten
        with the corresponding values from S.
        """
        K = self.alpha*S+(1.0-self.alpha)*rbf_kernel(R, gamma=bandwidth)
        K[new_inx, :] = S[new_inx, :]
        K[:, new_inx] = S[:, new_inx]
        return K
Example 34
Project: scikit-learn-graph   Author: nickgentoo   File: komd.py    GNU General Public License v3.0 5 votes vote down vote up
def __kernel_definition__(self):
        """Return a callable computing the configured kernel (self.Kf).

        'rbf' -> RBF with self.rbf_gamma; 'poly' -> polynomial with
        self.poly_deg / self.poly_coeff; None or 'linear' -> linear kernel.
        """
        if self.Kf == 'rbf':
            return lambda X, Y: rbf_kernel(X, Y, self.rbf_gamma)
        if self.Kf == 'poly':
            return lambda X, Y: polynomial_kernel(X, Y, degree=self.poly_deg, gamma=None, coef0=self.poly_coeff)
        # `is None` instead of `== None` (PEP 8 identity comparison).
        if self.Kf is None or self.Kf == 'linear':
            return lambda X, Y: linear_kernel(X, Y)
Example 35
Project: scikit-learn-graph   Author: nickgentoo   File: komd.py    GNU General Public License v3.0 5 votes vote down vote up
def __kernel_definition__(self):
        """Return a callable computing the configured kernel (self.Kf).

        'rbf' -> RBF with self.rbf_gamma; 'poly' -> polynomial with
        self.poly_deg / self.poly_coeff; None or 'linear' -> linear kernel.
        """
        if self.Kf == 'rbf':
            return lambda X, Y: rbf_kernel(X, Y, self.rbf_gamma)
        if self.Kf == 'poly':
            return lambda X, Y: polynomial_kernel(X, Y, degree=self.poly_deg, gamma=None, coef0=self.poly_coeff)
        # `is None` instead of `== None` (PEP 8 identity comparison).
        if self.Kf is None or self.Kf == 'linear':
            return lambda X, Y: linear_kernel(X, Y)
Example 36
Project: scikit-learn-graph   Author: nickgentoo   File: komd.py    GNU General Public License v3.0 5 votes vote down vote up
def __kernel_definition__(self):
        """Return a callable computing the configured kernel (self.Kf).

        'rbf' -> RBF with self.rbf_gamma; 'poly' -> polynomial with
        self.poly_deg / self.poly_coeff; None or 'linear' -> linear kernel.
        """
        if self.Kf == 'rbf':
            return lambda X, Y: rbf_kernel(X, Y, self.rbf_gamma)
        if self.Kf == 'poly':
            return lambda X, Y: polynomial_kernel(X, Y, degree=self.poly_deg, gamma=None, coef0=self.poly_coeff)
        # `is None` instead of `== None` (PEP 8 identity comparison).
        if self.Kf is None or self.Kf == 'linear':
            return lambda X, Y: linear_kernel(X, Y)
Example 37
Project: scikit-learn-graph   Author: nickgentoo   File: komd.py    GNU General Public License v3.0 5 votes vote down vote up
def __kernel_definition__(self):
        """Return a callable computing the configured kernel (self.Kf).

        'rbf' -> RBF with self.rbf_gamma; 'poly' -> polynomial with
        self.poly_deg / self.poly_coeff; None or 'linear' -> linear kernel.
        """
        if self.Kf == 'rbf':
            return lambda X, Y: rbf_kernel(X, Y, self.rbf_gamma)
        if self.Kf == 'poly':
            return lambda X, Y: polynomial_kernel(X, Y, degree=self.poly_deg, gamma=None, coef0=self.poly_coeff)
        # `is None` instead of `== None` (PEP 8 identity comparison).
        if self.Kf is None or self.Kf == 'linear':
            return lambda X, Y: linear_kernel(X, Y)
Example 38
Project: scikit-learn-graph   Author: nickgentoo   File: komd.py    GNU General Public License v3.0 5 votes vote down vote up
def __kernel_definition__(self):
        """Return a callable computing the configured kernel (self.Kf).

        'rbf' -> RBF with self.rbf_gamma; 'poly' -> polynomial with
        self.poly_deg / self.poly_coeff; None or 'linear' -> linear kernel.
        """
        if self.Kf == 'rbf':
            return lambda X, Y: rbf_kernel(X, Y, self.rbf_gamma)
        if self.Kf == 'poly':
            return lambda X, Y: polynomial_kernel(X, Y, degree=self.poly_deg, gamma=None, coef0=self.poly_coeff)
        # `is None` instead of `== None` (PEP 8 identity comparison).
        if self.Kf is None or self.Kf == 'linear':
            return lambda X, Y: linear_kernel(X, Y)
Example 39
Project: scikit-learn-graph   Author: nickgentoo   File: komd.py    GNU General Public License v3.0 5 votes vote down vote up
def __kernel_definition__(self):
        """Return a callable computing the configured kernel (self.Kf).

        'rbf' -> RBF with self.rbf_gamma; 'poly' -> polynomial with
        self.poly_deg / self.poly_coeff; None or 'linear' -> linear kernel.
        """
        if self.Kf == 'rbf':
            return lambda X, Y: rbf_kernel(X, Y, self.rbf_gamma)
        if self.Kf == 'poly':
            return lambda X, Y: polynomial_kernel(X, Y, degree=self.poly_deg, gamma=None, coef0=self.poly_coeff)
        # `is None` instead of `== None` (PEP 8 identity comparison).
        if self.Kf is None or self.Kf == 'linear':
            return lambda X, Y: linear_kernel(X, Y)
Example 40
Project: scikit-learn-graph   Author: nickgentoo   File: komd.py    GNU General Public License v3.0 5 votes vote down vote up
def __kernel_definition__(self):
        """Return a callable computing the configured kernel (self.Kf).

        'rbf' -> RBF with self.rbf_gamma; 'poly' -> polynomial with
        self.poly_deg / self.poly_coeff; None or 'linear' -> linear kernel.
        """
        if self.Kf == 'rbf':
            return lambda X, Y: rbf_kernel(X, Y, self.rbf_gamma)
        if self.Kf == 'poly':
            return lambda X, Y: polynomial_kernel(X, Y, degree=self.poly_deg, gamma=None, coef0=self.poly_coeff)
        # `is None` instead of `== None` (PEP 8 identity comparison).
        if self.Kf is None or self.Kf == 'linear':
            return lambda X, Y: linear_kernel(X, Y)
Example 41
Project: scikit-learn-graph   Author: nickgentoo   File: komd.py    GNU General Public License v3.0 5 votes vote down vote up
def __kernel_definition__(self):
        """Return a callable computing the configured kernel (self.Kf).

        'rbf' -> RBF with self.rbf_gamma; 'poly' -> polynomial with
        self.poly_deg / self.poly_coeff; None or 'linear' -> linear kernel.
        """
        if self.Kf == 'rbf':
            return lambda X, Y: rbf_kernel(X, Y, self.rbf_gamma)
        if self.Kf == 'poly':
            return lambda X, Y: polynomial_kernel(X, Y, degree=self.poly_deg, gamma=None, coef0=self.poly_coeff)
        # `is None` instead of `== None` (PEP 8 identity comparison).
        if self.Kf is None or self.Kf == 'linear':
            return lambda X, Y: linear_kernel(X, Y)
Example 42
Project: scikit-learn-graph   Author: nickgentoo   File: komd.py    GNU General Public License v3.0 5 votes vote down vote up
def __kernel_definition__(self):
        """Return a callable computing the configured kernel (self.Kf).

        'rbf' -> RBF with self.rbf_gamma; 'poly' -> polynomial with
        self.poly_deg / self.poly_coeff; None or 'linear' -> linear kernel.
        """
        if self.Kf == 'rbf':
            return lambda X, Y: rbf_kernel(X, Y, self.rbf_gamma)
        if self.Kf == 'poly':
            return lambda X, Y: polynomial_kernel(X, Y, degree=self.poly_deg, gamma=None, coef0=self.poly_coeff)
        # `is None` instead of `== None` (PEP 8 identity comparison).
        if self.Kf is None or self.Kf == 'linear':
            return lambda X, Y: linear_kernel(X, Y)
Example 43
Project: scikit-learn-graph   Author: nickgentoo   File: komd.py    GNU General Public License v3.0 5 votes vote down vote up
def __kernel_definition__(self):
        """Return a callable computing the configured kernel (self.Kf).

        'rbf' -> RBF with self.rbf_gamma; 'poly' -> polynomial with
        self.poly_deg / self.poly_coeff; None or 'linear' -> linear kernel.
        """
        if self.Kf == 'rbf':
            return lambda X, Y: rbf_kernel(X, Y, self.rbf_gamma)
        if self.Kf == 'poly':
            return lambda X, Y: polynomial_kernel(X, Y, degree=self.poly_deg, gamma=None, coef0=self.poly_coeff)
        # `is None` instead of `== None` (PEP 8 identity comparison).
        if self.Kf is None or self.Kf == 'linear':
            return lambda X, Y: linear_kernel(X, Y)
Example 44
Project: scikit-learn-graph   Author: nickgentoo   File: komd.py    GNU General Public License v3.0 5 votes vote down vote up
def __kernel_definition__(self):
        """Return a callable computing the configured kernel (self.Kf).

        'rbf' -> RBF with self.rbf_gamma; 'poly' -> polynomial with
        self.poly_deg / self.poly_coeff; None or 'linear' -> linear kernel.
        """
        if self.Kf == 'rbf':
            return lambda X, Y: rbf_kernel(X, Y, self.rbf_gamma)
        if self.Kf == 'poly':
            return lambda X, Y: polynomial_kernel(X, Y, degree=self.poly_deg, gamma=None, coef0=self.poly_coeff)
        # `is None` instead of `== None` (PEP 8 identity comparison).
        if self.Kf is None or self.Kf == 'linear':
            return lambda X, Y: linear_kernel(X, Y)
Example 45
Project: scikit-learn-graph   Author: nickgentoo   File: komd.py    GNU General Public License v3.0 5 votes vote down vote up
def __kernel_definition__(self):
        """Return a callable computing the configured kernel (self.Kf).

        'rbf' -> RBF with self.rbf_gamma; 'poly' -> polynomial with
        self.poly_deg / self.poly_coeff; None or 'linear' -> linear kernel.
        """
        if self.Kf == 'rbf':
            return lambda X, Y: rbf_kernel(X, Y, self.rbf_gamma)
        if self.Kf == 'poly':
            return lambda X, Y: polynomial_kernel(X, Y, degree=self.poly_deg, gamma=None, coef0=self.poly_coeff)
        # `is None` instead of `== None` (PEP 8 identity comparison).
        if self.Kf is None or self.Kf == 'linear':
            return lambda X, Y: linear_kernel(X, Y)
Example 46
Project: scikit-learn-graph   Author: nickgentoo   File: komd.py    GNU General Public License v3.0 5 votes vote down vote up
def __kernel_definition__(self):
        """Return a callable computing the configured kernel (self.Kf).

        'rbf' -> RBF with self.rbf_gamma; 'poly' -> polynomial with
        self.poly_deg / self.poly_coeff; None or 'linear' -> linear kernel.
        """
        if self.Kf == 'rbf':
            return lambda X, Y: rbf_kernel(X, Y, self.rbf_gamma)
        if self.Kf == 'poly':
            return lambda X, Y: polynomial_kernel(X, Y, degree=self.poly_deg, gamma=None, coef0=self.poly_coeff)
        # `is None` instead of `== None` (PEP 8 identity comparison).
        if self.Kf is None or self.Kf == 'linear':
            return lambda X, Y: linear_kernel(X, Y)
Example 47
Project: scikit-learn-graph   Author: nickgentoo   File: komd.py    GNU General Public License v3.0 5 votes vote down vote up
def __kernel_definition__(self):
        """Return a callable computing the configured kernel (self.Kf).

        'rbf' -> RBF with self.rbf_gamma; 'poly' -> polynomial with
        self.poly_deg / self.poly_coeff; None or 'linear' -> linear kernel.
        """
        if self.Kf == 'rbf':
            return lambda X, Y: rbf_kernel(X, Y, self.rbf_gamma)
        if self.Kf == 'poly':
            return lambda X, Y: polynomial_kernel(X, Y, degree=self.poly_deg, gamma=None, coef0=self.poly_coeff)
        # `is None` instead of `== None` (PEP 8 identity comparison).
        if self.Kf is None or self.Kf == 'linear':
            return lambda X, Y: linear_kernel(X, Y)
Example 48
Project: scikit-learn-graph   Author: nickgentoo   File: komd.py    GNU General Public License v3.0 5 votes vote down vote up
def __kernel_definition__(self):
        """Return a callable computing the configured kernel (self.Kf).

        'rbf' -> RBF with self.rbf_gamma; 'poly' -> polynomial with
        self.poly_deg / self.poly_coeff; None or 'linear' -> linear kernel.
        """
        if self.Kf == 'rbf':
            return lambda X, Y: rbf_kernel(X, Y, self.rbf_gamma)
        if self.Kf == 'poly':
            return lambda X, Y: polynomial_kernel(X, Y, degree=self.poly_deg, gamma=None, coef0=self.poly_coeff)
        # `is None` instead of `== None` (PEP 8 identity comparison).
        if self.Kf is None or self.Kf == 'linear':
            return lambda X, Y: linear_kernel(X, Y)
Example 49
Project: scikit-learn-graph   Author: nickgentoo   File: komd.py    GNU General Public License v3.0 5 votes vote down vote up
def __kernel_definition__(self):
        """Return a callable computing the configured kernel (self.Kf).

        'rbf' -> RBF with self.rbf_gamma; 'poly' -> polynomial with
        self.poly_deg / self.poly_coeff; None or 'linear' -> linear kernel.
        """
        if self.Kf == 'rbf':
            return lambda X, Y: rbf_kernel(X, Y, self.rbf_gamma)
        if self.Kf == 'poly':
            return lambda X, Y: polynomial_kernel(X, Y, degree=self.poly_deg, gamma=None, coef0=self.poly_coeff)
        # `is None` instead of `== None` (PEP 8 identity comparison).
        if self.Kf is None or self.Kf == 'linear':
            return lambda X, Y: linear_kernel(X, Y)
Example 50
Project: scikit-learn-graph   Author: nickgentoo   File: komd.py    GNU General Public License v3.0 5 votes vote down vote up
def __kernel_definition__(self):
        """Return a callable computing the configured kernel (self.Kf).

        'rbf' -> RBF with self.rbf_gamma; 'poly' -> polynomial with
        self.poly_deg / self.poly_coeff; None or 'linear' -> linear kernel.
        """
        if self.Kf == 'rbf':
            return lambda X, Y: rbf_kernel(X, Y, self.rbf_gamma)
        if self.Kf == 'poly':
            return lambda X, Y: polynomial_kernel(X, Y, degree=self.poly_deg, gamma=None, coef0=self.poly_coeff)
        # `is None` instead of `== None` (PEP 8 identity comparison).
        if self.Kf is None or self.Kf == 'linear':
            return lambda X, Y: linear_kernel(X, Y)
Example 51
Project: scikit-learn-graph   Author: nickgentoo   File: komd.py    GNU General Public License v3.0 5 votes vote down vote up
def __kernel_definition__(self):
        """Return a callable computing the configured kernel (self.Kf).

        'rbf' -> RBF with self.rbf_gamma; 'poly' -> polynomial with
        self.poly_deg / self.poly_coeff; None or 'linear' -> linear kernel.
        """
        if self.Kf == 'rbf':
            return lambda X, Y: rbf_kernel(X, Y, self.rbf_gamma)
        if self.Kf == 'poly':
            return lambda X, Y: polynomial_kernel(X, Y, degree=self.poly_deg, gamma=None, coef0=self.poly_coeff)
        # `is None` instead of `== None` (PEP 8 identity comparison).
        if self.Kf is None or self.Kf == 'linear':
            return lambda X, Y: linear_kernel(X, Y)
Example 52
Project: scikit-learn-graph   Author: nickgentoo   File: komd.py    GNU General Public License v3.0 5 votes vote down vote up
def __kernel_definition__(self):
    """Return a callable ``K(X, Y)`` for the kernel named by ``self.Kf``.

    'rbf' uses ``self.rbf_gamma``; 'poly' uses ``self.poly_deg`` and
    ``self.poly_coeff``; ``None`` falls back to the linear kernel.
    """
    if self.Kf == 'rbf':
        return lambda X, Y: rbf_kernel(X, Y, self.rbf_gamma)
    if self.Kf == 'poly':
        return lambda X, Y: polynomial_kernel(X, Y, degree=self.poly_deg,
                                              gamma=None, coef0=self.poly_coeff)
    # Idiom fix: compare to the None singleton with ``is``, not ``==`` (PEP 8).
    if self.Kf is None or self.Kf == 'linear':
        return lambda X, Y: linear_kernel(X, Y)
Example 53
Project: scikit-learn-graph   Author: nickgentoo   File: komd.py    GNU General Public License v3.0 5 votes vote down vote up
def __kernel_definition__(self):
    """Return a callable ``K(X, Y)`` for the kernel named by ``self.Kf``.

    'rbf' uses ``self.rbf_gamma``; 'poly' uses ``self.poly_deg`` and
    ``self.poly_coeff``; ``None`` falls back to the linear kernel.
    """
    if self.Kf == 'rbf':
        return lambda X, Y: rbf_kernel(X, Y, self.rbf_gamma)
    if self.Kf == 'poly':
        return lambda X, Y: polynomial_kernel(X, Y, degree=self.poly_deg,
                                              gamma=None, coef0=self.poly_coeff)
    # Idiom fix: compare to the None singleton with ``is``, not ``==`` (PEP 8).
    if self.Kf is None or self.Kf == 'linear':
        return lambda X, Y: linear_kernel(X, Y)
Example 54
Project: scikit-learn-graph   Author: nickgentoo   File: komd.py    GNU General Public License v3.0 5 votes vote down vote up
def __kernel_definition__(self):
    """Return a callable ``K(X, Y)`` for the kernel named by ``self.Kf``.

    'rbf' uses ``self.rbf_gamma``; 'poly' uses ``self.poly_deg`` and
    ``self.poly_coeff``; ``None`` falls back to the linear kernel.
    """
    if self.Kf == 'rbf':
        return lambda X, Y: rbf_kernel(X, Y, self.rbf_gamma)
    if self.Kf == 'poly':
        return lambda X, Y: polynomial_kernel(X, Y, degree=self.poly_deg,
                                              gamma=None, coef0=self.poly_coeff)
    # Idiom fix: compare to the None singleton with ``is``, not ``==`` (PEP 8).
    if self.Kf is None or self.Kf == 'linear':
        return lambda X, Y: linear_kernel(X, Y)
Example 55
Project: scikit-learn-graph   Author: nickgentoo   File: komd.py    GNU General Public License v3.0 5 votes vote down vote up
def __kernel_definition__(self):
    """Return a callable ``K(X, Y)`` for the kernel named by ``self.Kf``.

    'rbf' uses ``self.rbf_gamma``; 'poly' uses ``self.poly_deg`` and
    ``self.poly_coeff``; ``None`` falls back to the linear kernel.
    """
    if self.Kf == 'rbf':
        return lambda X, Y: rbf_kernel(X, Y, self.rbf_gamma)
    if self.Kf == 'poly':
        return lambda X, Y: polynomial_kernel(X, Y, degree=self.poly_deg,
                                              gamma=None, coef0=self.poly_coeff)
    # Idiom fix: compare to the None singleton with ``is``, not ``==`` (PEP 8).
    if self.Kf is None or self.Kf == 'linear':
        return lambda X, Y: linear_kernel(X, Y)
Example 56
Project: scikit-learn-graph   Author: nickgentoo   File: komd.py    GNU General Public License v3.0 5 votes vote down vote up
def __kernel_definition__(self):
    """Return a callable ``K(X, Y)`` for the kernel named by ``self.Kf``.

    'rbf' uses ``self.rbf_gamma``; 'poly' uses ``self.poly_deg`` and
    ``self.poly_coeff``; ``None`` falls back to the linear kernel.
    """
    if self.Kf == 'rbf':
        return lambda X, Y: rbf_kernel(X, Y, self.rbf_gamma)
    if self.Kf == 'poly':
        return lambda X, Y: polynomial_kernel(X, Y, degree=self.poly_deg,
                                              gamma=None, coef0=self.poly_coeff)
    # Idiom fix: compare to the None singleton with ``is``, not ``==`` (PEP 8).
    if self.Kf is None or self.Kf == 'linear':
        return lambda X, Y: linear_kernel(X, Y)
Example 57
Project: scikit-learn-graph   Author: nickgentoo   File: komd.py    GNU General Public License v3.0 5 votes vote down vote up
def __kernel_definition__(self):
    """Return a callable ``K(X, Y)`` for the kernel named by ``self.Kf``.

    'rbf' uses ``self.rbf_gamma``; 'poly' uses ``self.poly_deg`` and
    ``self.poly_coeff``; ``None`` falls back to the linear kernel.
    """
    if self.Kf == 'rbf':
        return lambda X, Y: rbf_kernel(X, Y, self.rbf_gamma)
    if self.Kf == 'poly':
        return lambda X, Y: polynomial_kernel(X, Y, degree=self.poly_deg,
                                              gamma=None, coef0=self.poly_coeff)
    # Idiom fix: compare to the None singleton with ``is``, not ``==`` (PEP 8).
    if self.Kf is None or self.Kf == 'linear':
        return lambda X, Y: linear_kernel(X, Y)
Example 58
Project: scikit-learn-graph   Author: nickgentoo   File: komd.py    GNU General Public License v3.0 5 votes vote down vote up
def __kernel_definition__(self):
    """Return a callable ``K(X, Y)`` for the kernel named by ``self.Kf``.

    'rbf' uses ``self.rbf_gamma``; 'poly' uses ``self.poly_deg`` and
    ``self.poly_coeff``; ``None`` falls back to the linear kernel.
    """
    if self.Kf == 'rbf':
        return lambda X, Y: rbf_kernel(X, Y, self.rbf_gamma)
    if self.Kf == 'poly':
        return lambda X, Y: polynomial_kernel(X, Y, degree=self.poly_deg,
                                              gamma=None, coef0=self.poly_coeff)
    # Idiom fix: compare to the None singleton with ``is``, not ``==`` (PEP 8).
    if self.Kf is None or self.Kf == 'linear':
        return lambda X, Y: linear_kernel(X, Y)
Example 59
Project: scikit-learn-graph   Author: nickgentoo   File: komd.py    GNU General Public License v3.0 5 votes vote down vote up
def __kernel_definition__(self):
    """Return a callable ``K(X, Y)`` for the kernel named by ``self.Kf``.

    'rbf' uses ``self.rbf_gamma``; 'poly' uses ``self.poly_deg`` and
    ``self.poly_coeff``; ``None`` falls back to the linear kernel.
    """
    if self.Kf == 'rbf':
        return lambda X, Y: rbf_kernel(X, Y, self.rbf_gamma)
    if self.Kf == 'poly':
        return lambda X, Y: polynomial_kernel(X, Y, degree=self.poly_deg,
                                              gamma=None, coef0=self.poly_coeff)
    # Idiom fix: compare to the None singleton with ``is``, not ``==`` (PEP 8).
    if self.Kf is None or self.Kf == 'linear':
        return lambda X, Y: linear_kernel(X, Y)
Example 60
Project: scikit-learn-graph   Author: nickgentoo   File: komd.py    GNU General Public License v3.0 5 votes vote down vote up
def __kernel_definition__(self):
    """Return a callable ``K(X, Y)`` for the kernel named by ``self.Kf``.

    'rbf' uses ``self.rbf_gamma``; 'poly' uses ``self.poly_deg`` and
    ``self.poly_coeff``; ``None`` falls back to the linear kernel.
    """
    if self.Kf == 'rbf':
        return lambda X, Y: rbf_kernel(X, Y, self.rbf_gamma)
    if self.Kf == 'poly':
        return lambda X, Y: polynomial_kernel(X, Y, degree=self.poly_deg,
                                              gamma=None, coef0=self.poly_coeff)
    # Idiom fix: compare to the None singleton with ``is``, not ``==`` (PEP 8).
    if self.Kf is None or self.Kf == 'linear':
        return lambda X, Y: linear_kernel(X, Y)
Example 61
Project: scikit-learn-graph   Author: nickgentoo   File: komd.py    GNU General Public License v3.0 5 votes vote down vote up
def __kernel_definition__(self):
    """Return a callable ``K(X, Y)`` for the kernel named by ``self.Kf``.

    'rbf' uses ``self.rbf_gamma``; 'poly' uses ``self.poly_deg`` and
    ``self.poly_coeff``; ``None`` falls back to the linear kernel.
    """
    if self.Kf == 'rbf':
        return lambda X, Y: rbf_kernel(X, Y, self.rbf_gamma)
    if self.Kf == 'poly':
        return lambda X, Y: polynomial_kernel(X, Y, degree=self.poly_deg,
                                              gamma=None, coef0=self.poly_coeff)
    # Idiom fix: compare to the None singleton with ``is``, not ``==`` (PEP 8).
    if self.Kf is None or self.Kf == 'linear':
        return lambda X, Y: linear_kernel(X, Y)
Example 62
Project: scikit-learn-graph   Author: nickgentoo   File: komd.py    GNU General Public License v3.0 5 votes vote down vote up
def __kernel_definition__(self):
    """Return a callable ``K(X, Y)`` for the kernel named by ``self.Kf``.

    'rbf' uses ``self.rbf_gamma``; 'poly' uses ``self.poly_deg`` and
    ``self.poly_coeff``; ``None`` falls back to the linear kernel.
    """
    if self.Kf == 'rbf':
        return lambda X, Y: rbf_kernel(X, Y, self.rbf_gamma)
    if self.Kf == 'poly':
        return lambda X, Y: polynomial_kernel(X, Y, degree=self.poly_deg,
                                              gamma=None, coef0=self.poly_coeff)
    # Idiom fix: compare to the None singleton with ``is``, not ``==`` (PEP 8).
    if self.Kf is None or self.Kf == 'linear':
        return lambda X, Y: linear_kernel(X, Y)
Example 63
Project: scikit-learn-graph   Author: nickgentoo   File: komd.py    GNU General Public License v3.0 5 votes vote down vote up
def __kernel_definition__(self):
    """Return a callable ``K(X, Y)`` for the kernel named by ``self.Kf``.

    'rbf' uses ``self.rbf_gamma``; 'poly' uses ``self.poly_deg`` and
    ``self.poly_coeff``; ``None`` falls back to the linear kernel.
    """
    if self.Kf == 'rbf':
        return lambda X, Y: rbf_kernel(X, Y, self.rbf_gamma)
    if self.Kf == 'poly':
        return lambda X, Y: polynomial_kernel(X, Y, degree=self.poly_deg,
                                              gamma=None, coef0=self.poly_coeff)
    # Idiom fix: compare to the None singleton with ``is``, not ``==`` (PEP 8).
    if self.Kf is None or self.Kf == 'linear':
        return lambda X, Y: linear_kernel(X, Y)
Example 64
Project: gplearn   Author: trevorstephens   File: check_estimator.py    BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def check_estimators_overwrite_params(name, estimator_orig):
    """Check that ``fit`` does not mutate the estimator's constructor params.

    Parameters
    ----------
    name : str
        Display name of the estimator, used only in the failure message.
    estimator_orig : estimator instance
        Prototype estimator; a clone is fitted so the original stays intact.
    """
    X, y = make_blobs(random_state=0, n_samples=9)
    y = (y > 1).astype(int)
    # some want non-negative input
    X -= X.min()
    # NOTE(review): presumably turns X into a kernel/affinity matrix for
    # pairwise estimators — confirm against pairwise_estimator_convert_X.
    X = pairwise_estimator_convert_X(X, estimator_orig, kernel=rbf_kernel)
    estimator = clone(estimator_orig)
    y = multioutput_estimator_convert_y_2d(estimator, y)

    set_random_state(estimator)

    # Make a physical copy of the original estimator parameters before fitting.
    params = estimator.get_params()
    original_params = deepcopy(params)

    # Fit the model
    estimator.fit(X, y)

    # Compare the state of the model parameters with the original parameters
    new_params = estimator.get_params()
    for param_name, original_value in original_params.items():
        new_value = new_params[param_name]

        # We should never change or mutate the internal state of input
        # parameters by default. To check this we use the joblib.hash function
        # that introspects recursively any subobjects to compute a checksum.
        # The only exception to this rule of immutable constructor parameters
        # is possible RandomState instance but in this check we explicitly
        # fixed the random_state params recursively to be integer seeds.
        assert_equal(_joblib.hash(new_value), _joblib.hash(original_value),
                     "Estimator %s should not change or mutate "
                     " the parameter %s from %s to %s during fit."
                     % (name, param_name, original_value, new_value)) 
Example 65
Project: Weiss   Author: WangWenjun559   File: test_pairwise.py    Apache License 2.0 5 votes vote down vote up
def callable_rbf_kernel(x, y, **kwds):
    """Callable wrapper around pairwise.rbf_kernel.

    Inputs are promoted to 2-D before delegation so 1-D samples work too.
    """
    return rbf_kernel(np.atleast_2d(x), np.atleast_2d(y), **kwds)
Example 66
Project: Weiss   Author: WangWenjun559   File: test_pairwise.py    Apache License 2.0 5 votes vote down vote up
def test_pairwise_kernels_filter_param():
    """filter_params=True must drop kwargs the kernel does not accept."""
    rng = np.random.RandomState(0)
    X = rng.random_sample((5, 4))
    Y = rng.random_sample((2, 4))
    expected = rbf_kernel(X, Y, gamma=0.1)
    params = {"gamma": 0.1, "blabla": ":)"}
    filtered = pairwise_kernels(X, Y, metric="rbf", filter_params=True, **params)
    assert_array_almost_equal(expected, filtered)

    # Without filtering, the unknown keyword must raise.
    assert_raises(TypeError, pairwise_kernels, X, Y, "rbf", **params)
Example 67
Project: Weiss   Author: WangWenjun559   File: test_pairwise.py    Apache License 2.0 5 votes vote down vote up
def test_kernel_symmetry():
    """Every built-in kernel must produce a symmetric Gram matrix."""
    rng = np.random.RandomState(0)
    data = rng.random_sample((5, 4))
    kernels = (linear_kernel, polynomial_kernel, rbf_kernel,
               sigmoid_kernel, cosine_similarity)
    for kern in kernels:
        gram = kern(data, data)
        assert_array_almost_equal(gram, gram.T, 15)
Example 68
Project: Weiss   Author: WangWenjun559   File: test_pairwise.py    Apache License 2.0 5 votes vote down vote up
def test_kernel_sparse():
    """Kernels must give the same result on dense and CSR-sparse input."""
    rng = np.random.RandomState(0)
    dense = rng.random_sample((5, 4))
    compressed = csr_matrix(dense)
    for kern in (linear_kernel, polynomial_kernel, rbf_kernel,
                 sigmoid_kernel, cosine_similarity):
        gram_dense = kern(dense, dense)
        gram_sparse = kern(compressed, compressed)
        assert_array_almost_equal(gram_dense, gram_sparse)
Example 69
Project: Weiss   Author: WangWenjun559   File: test_spectral_embedding.py    Apache License 2.0 5 votes vote down vote up
def test_spectral_embedding_precomputed_affinity(seed=36):
    """A precomputed RBF affinity must match affinity='rbf' on dataset ``S``."""
    # Test spectral embedding with precomputed kernel
    gamma = 1.0
    se_precomp = SpectralEmbedding(n_components=2, affinity="precomputed",
                                   random_state=np.random.RandomState(seed))
    se_rbf = SpectralEmbedding(n_components=2, affinity="rbf",
                               gamma=gamma,
                               random_state=np.random.RandomState(seed))
    embed_precomp = se_precomp.fit_transform(rbf_kernel(S, gamma=gamma))
    embed_rbf = se_rbf.fit_transform(S)
    assert_array_almost_equal(
        se_precomp.affinity_matrix_, se_rbf.affinity_matrix_)
    # Sign of each embedding column is arbitrary; the helper tolerates flips.
    assert_true(_check_with_col_sign_flipping(embed_precomp, embed_rbf, 0.05)) 
Example 70
Project: Weiss   Author: WangWenjun559   File: test_spectral_embedding.py    Apache License 2.0 5 votes vote down vote up
def test_spectral_embedding_deterministic():
    """Two runs of spectral_embedding on the same affinity must agree."""
    rs = np.random.RandomState(36)
    affinity = rbf_kernel(rs.randn(10, 30))
    first = spectral_embedding(affinity)
    second = spectral_embedding(affinity)
    assert_array_almost_equal(first, second)
Example 71
Project: Weiss   Author: WangWenjun559   File: test_svm.py    Apache License 2.0 5 votes vote down vote up
def test_decision_function():
    """Check SVC.decision_function against a manual libsvm reconstruction.

    For the linear kernel the decision values are ``X @ coef_.T + intercept_``;
    for the RBF kernel they are rebuilt from the support vectors and dual
    coefficients.  Uses the module-level fixtures ``iris``, ``X`` and ``Y``.
    """
    # multi class:
    clf = svm.SVC(kernel='linear', C=0.1).fit(iris.data, iris.target)

    dec = np.dot(iris.data, clf.coef_.T) + clf.intercept_

    assert_array_almost_equal(dec, clf.decision_function(iris.data))

    # binary:
    clf.fit(X, Y)
    dec = np.dot(X, clf.coef_.T) + clf.intercept_
    prediction = clf.predict(X)
    assert_array_almost_equal(dec.ravel(), clf.decision_function(X))
    # FIX: ``np.int`` was removed in NumPy 1.24; the builtin ``int`` is the
    # documented, behavior-identical replacement.
    assert_array_almost_equal(
        prediction,
        clf.classes_[(clf.decision_function(X) > 0).astype(int)])
    expected = np.array([-1., -0.66, -1., 0.66, 1., 1.])
    assert_array_almost_equal(clf.decision_function(X), expected, 2)

    # kernel binary:
    clf = svm.SVC(kernel='rbf', gamma=1)
    clf.fit(X, Y)

    rbfs = rbf_kernel(X, clf.support_vectors_, gamma=clf.gamma)
    dec = np.dot(rbfs, clf.dual_coef_.T) + clf.intercept_
    assert_array_almost_equal(dec.ravel(), clf.decision_function(X))
Example 72
Project: Weiss   Author: WangWenjun559   File: test_kernel_approximation.py    Apache License 2.0 5 votes vote down vote up
def test_nystroem_approximation():
    """Basic Nystroem checks: exactness at full rank, callable kernels, and
    that every registered kernel fits/transforms to the expected shape."""
    rnd = np.random.RandomState(0)
    X = rnd.uniform(size=(10, 4))

    # With n_components = n_samples this is exact
    X_transformed = Nystroem(n_components=X.shape[0]).fit_transform(X)
    K = rbf_kernel(X)
    assert_array_almost_equal(np.dot(X_transformed, X_transformed.T), K)

    trans = Nystroem(n_components=2, random_state=rnd)
    X_transformed = trans.fit(X).transform(X)
    assert_equal(X_transformed.shape, (X.shape[0], 2))

    # test callable kernel
    # FIX: use a named function rather than a lambda assignment (PEP 8 E731),
    # and avoid shadowing the ``linear_kernel`` imported at module level.
    def dot_kernel(X, Y):
        return np.dot(X, Y.T)

    trans = Nystroem(n_components=2, kernel=dot_kernel, random_state=rnd)
    X_transformed = trans.fit(X).transform(X)
    assert_equal(X_transformed.shape, (X.shape[0], 2))

    # test that available kernels fit and transform
    kernels_available = kernel_metrics()
    for kern in kernels_available:
        trans = Nystroem(n_components=2, kernel=kern, random_state=rnd)
        X_transformed = trans.fit(X).transform(X)
        assert_equal(X_transformed.shape, (X.shape[0], 2))
Example 73
Project: Weiss   Author: WangWenjun559   File: test_kernel_approximation.py    Apache License 2.0 5 votes vote down vote up
def test_nystroem_singular_kernel():
    """Nystroem must handle a singular (rank-deficient) kernel matrix."""
    rng = np.random.RandomState(0)
    X = rng.rand(10, 20)
    X = np.vstack([X] * 2)  # duplicate samples -> singular Gram matrix

    gamma = 100
    N = Nystroem(gamma=gamma, n_components=X.shape[0]).fit(X)
    X_transformed = N.transform(X)

    K = rbf_kernel(X, gamma=gamma)

    assert_array_almost_equal(K, np.dot(X_transformed, X_transformed.T))
    # BUG FIX: the original asserted on ``Y``, a name unrelated to this test;
    # the intent is to check the transformed features themselves are finite
    # (as later fixed upstream in scikit-learn).
    assert_true(np.all(np.isfinite(X_transformed)))
Example 74
Project: Weiss   Author: WangWenjun559   File: test_spectral.py    Apache License 2.0 5 votes vote down vote up
def test_spectral_clustering_sparse():
    """Spectral clustering must accept a sparse precomputed affinity."""
    X, y = make_blobs(n_samples=20, random_state=0,
                      centers=[[1, 1], [-1, -1]], cluster_std=0.01)

    affinity = rbf_kernel(X, gamma=1)
    affinity = np.maximum(affinity - 1e-4, 0)  # threshold tiny entries to zero
    affinity = sparse.coo_matrix(affinity)

    model = SpectralClustering(random_state=0, n_clusters=2,
                               affinity='precomputed').fit(affinity)
    assert_equal(adjusted_rand_score(y, model.labels_), 1)
Example 75
Project: TextCategorization   Author: Y-oHr-N   File: laplacian_svm.py    MIT License 4 votes vote down vote up
def fit(self, X, y, L):
        """Fit the model according to the given training data.

        Parameters
        ----------
        X : array-like, shape = [n_samples, n_features]
            Training data.

        y : array-like, shape = [n_samples]
            Target values (unlabeled points are marked as 0).

        L : array-like, shape = [n_samples, n_samples]
            Graph Laplacian.

        Returns
        -------
        self : object
        """

        # Split labeled from unlabeled points; label 0 marks "unlabeled".
        labeled               = y != 0
        y_labeled             = y[labeled]
        n_samples, n_features = X.shape
        n_labeled_samples     = y_labeled.size
        I                     = sp.eye(n_samples)
        Y                     = sp.diags(y_labeled)
        # J selects the labeled rows (identity padded with zero columns).
        J                     = sp.eye(n_labeled_samples, n_samples)
        K                     = rbf_kernel(X, gamma=self.gamma_k)
        # NOTE(review): M mixes the ambient term (gamma_a) with the intrinsic
        # graph term (gamma_i, Laplacian power p) — presumably the LapSVM
        # regularizer from Belkin et al.; confirm against the paper.
        M                     = 2 * self.gamma_a * I \
            + 2 * self.gamma_i / n_samples**2 * L**self.p @ K

        # Construct the QP, invoke solver
        solvers.options['show_progress'] = False
        sol                   = solvers.qp(
            P                 = matrix(Y @ J @ K @ LA.inv(M) @ J.T @ Y),
            q                 = matrix(-1 * np.ones(n_labeled_samples)),
            # Box constraints 0 <= alpha <= 1/n_labeled_samples, expressed as
            # G @ alpha <= h with stacked sign conventions.
            G                 = matrix(np.vstack((
                -1 * np.eye(n_labeled_samples),
                n_labeled_samples * np.eye(n_labeled_samples)
            ))),
            h                 = matrix(np.hstack((
                np.zeros(n_labeled_samples),
                np.ones(n_labeled_samples)
            ))),
            A                 = matrix(y_labeled, (1, n_labeled_samples), 'd'),
            b                 = matrix(0.0)
        )

        # Train a classifier: recover the expansion coefficients from the duals.
        self.dual_coef_       = LA.solve(M, J.T @ Y @ np.array(sol['x']).ravel())

        return self 
Example 76
Project: scikit-learn-extra   Author: scikit-learn-contrib   File: test_fastfood.py    BSD 3-Clause "New" or "Revised" License 4 votes vote down vote up
def test_fastfood():
    """test that Fastfood fast approximates kernel on random data"""
    # compute exact kernel
    gamma = 10.0
    exact = rbf_kernel(X, Y, gamma=gamma)

    # Fastfood parametrizes the RBF by sigma rather than gamma.
    sigma = np.sqrt(1 / (2 * gamma))

    # approximate kernel mapping
    transformer = Fastfood(sigma, n_components=1000, random_state=42)
    transformer.fit(X)
    X_feat = transformer.transform(X)
    Y_feat = transformer.transform(Y)

    # inner products of the feature maps approximate the kernel
    approx = np.dot(X_feat, Y_feat.T)

    print("approximation:", approx[:5, :5])
    print("true kernel:", exact[:5, :5])
    assert_array_almost_equal(exact, approx, decimal=1)


# def test_fastfood_mem_or_accuracy():
#     """compares the performance of Fastfood and RKS"""
#     #generate data
#     X = rng.random_sample(size=(10000, 4000))
#     X /= X.sum(axis=1)[:, np.newaxis]
#
#     # calculate feature maps
#     gamma = 10.
#     sigma = np.sqrt(1 / (2 * gamma))
#     number_of_features_to_generate = 1000
#
#
#
#     fastfood_start = datetime.datetime.utcnow()
#     # Fastfood: approximate kernel mapping
#     rbf_transform = Fastfood(
#          sigma=sigma, n_components=number_of_features_to_generate,
#          tradeoff_less_mem_or_higher_accuracy='accuracy', random_state=42)
#     _ = rbf_transform.fit_transform(X)
#     fastfood_end = datetime.datetime.utcnow()
#     fastfood_spent_time =fastfood_end- fastfood_start
#     print "Timing fastfood accuracy: \t\t", fastfood_spent_time
#
#
#     fastfood_mem_start = datetime.datetime.utcnow()
#     # Fastfood: approximate kernel mapping
#     rbf_transform = Fastfood(
#            sigma=sigma, n_components=number_of_features_to_generate,
#            tradeoff_less_mem_or_higher_accuracy='mem', random_state=42)
#     _ = rbf_transform.fit_transform(X)
#     fastfood_mem_end = datetime.datetime.utcnow()
#     fastfood_mem_spent_time = fastfood_mem_end- fastfood_mem_start
#     print "Timing fastfood memory: \t\t", fastfood_mem_spent_time
#
#     assert_greater(fastfood_spent_time, fastfood_mem_spent_time) 
Example 77
Project: linear_neuron   Author: uglyboxer   File: test_pairwise.py    MIT License 4 votes vote down vote up
def test_pairwise_kernels():
    """pairwise_kernels must match the underlying kernel functions.

    Checks every metric registered in PAIRWISE_KERNEL_FUNCTIONS against dense
    arrays, tuples-of-tuples and CSR-sparse input (chi2 kernels reject sparse
    input), then a user-supplied callable metric with keyword arguments.
    """
    # Test the pairwise_kernels helper function.

    rng = np.random.RandomState(0)
    X = rng.random_sample((5, 4))
    Y = rng.random_sample((2, 4))
    # Test with all metrics that should be in PAIRWISE_KERNEL_FUNCTIONS.
    test_metrics = ["rbf", "sigmoid", "polynomial", "linear", "chi2",
                    "additive_chi2"]
    for metric in test_metrics:
        function = PAIRWISE_KERNEL_FUNCTIONS[metric]
        # Test with Y=None
        K1 = pairwise_kernels(X, metric=metric)
        K2 = function(X)
        assert_array_almost_equal(K1, K2)
        # Test with Y=Y
        K1 = pairwise_kernels(X, Y=Y, metric=metric)
        K2 = function(X, Y=Y)
        assert_array_almost_equal(K1, K2)
        # Test with tuples as X and Y
        X_tuples = tuple([tuple([v for v in row]) for row in X])
        Y_tuples = tuple([tuple([v for v in row]) for row in Y])
        K2 = pairwise_kernels(X_tuples, Y_tuples, metric=metric)
        assert_array_almost_equal(K1, K2)

        # Test with sparse X and Y
        X_sparse = csr_matrix(X)
        Y_sparse = csr_matrix(Y)
        if metric in ["chi2", "additive_chi2"]:
            # these don't support sparse matrices yet
            assert_raises(ValueError, pairwise_kernels,
                          X_sparse, Y=Y_sparse, metric=metric)
            continue
        K1 = pairwise_kernels(X_sparse, Y=Y_sparse, metric=metric)
        assert_array_almost_equal(K1, K2)
    # Test with a callable function, with given keywords.
    metric = callable_rbf_kernel
    kwds = {}
    kwds['gamma'] = 0.1
    K1 = pairwise_kernels(X, Y=Y, metric=metric, **kwds)
    K2 = rbf_kernel(X, Y=Y, **kwds)
    assert_array_almost_equal(K1, K2)

    # callable function, X=Y
    K1 = pairwise_kernels(X, Y=X, metric=metric, **kwds)
    K2 = rbf_kernel(X, Y=X, **kwds)
    assert_array_almost_equal(K1, K2) 
Example 78
Project: RGAN   Author: ratschlab   File: eugenium_mmd.py    MIT License 4 votes vote down vote up
def MMD_3_Sample_Test(X, Y, Z, sigma=-1, SelectSigma=True, computeMMDs=False):
    '''Performs the relative MMD test which returns a test statistic for whether Y is closer to X or than Z.
    See http://arxiv.org/pdf/1511.04581.pdf
    The bandwidth heuristic is based on the median heuristic (see Smola,Gretton).

    Returns (pvalue, tstat, sigma, MMDXY, MMDXZ); the MMD estimates are None
    unless computeMMDs is true.
    '''
    if(sigma<0):
        #Similar heuristics
        if SelectSigma:
            siz=np.min((1000, X.shape[0]))
            sigma1=kernelwidthPair(X[0:siz], Y[0:siz]);
            sigma2=kernelwidthPair(X[0:siz], Z[0:siz]);
            sigma=(sigma1+sigma2)/2.
        else:
            siz=np.min((1000, X.shape[0]*3))
            # BUG FIX: ``siz/3`` is a float under Python 3 and floats are not
            # valid slice bounds; use floor division instead.
            Zem=np.r_[X[0:siz//3], Y[0:siz//3], Z[0:siz//3]]
            sigma=kernelwidth(Zem);

    #kernel = partial(rbf_kernel, gamma=1.0/(sigma**2))
    kernel = partial(my_kernel, sigma=sigma)
    #kernel = partial(grbf, sigma=sigma)

    Kyy = kernel(Y, Y)
    Kzz = kernel(Z, Z)
    Kxy = kernel(X, Y)
    Kxz = kernel(X, Z)

    # Zero the diagonals for the unbiased within-sample estimates.
    Kyynd = Kyy-np.diag(np.diagonal(Kyy))
    Kzznd = Kzz-np.diag(np.diagonal(Kzz))
    m = Kxy.shape[0];
    n = Kyy.shape[0];
    r = Kzz.shape[0];

    u_yy=np.sum(Kyynd)*( 1./(n*(n-1)) )
    u_zz=np.sum(Kzznd)*( 1./(r*(r-1)) )
    u_xy=np.sum(Kxy)/(m*n)
    u_xz=np.sum(Kxz)/(m*r)
    #Compute the test statistic
    t=u_yy - 2.*u_xy - (u_zz-2.*u_xz)
    Diff_Var, Diff_Var_z2, data=MMD_Diff_Var(Kyy, Kzz, Kxy, Kxz)

    pvalue=sp.stats.norm.cdf(-t/np.sqrt((Diff_Var)))
  #  pvalue_z2=sp.stats.norm.cdf(-t/np.sqrt((Diff_Var_z2)))
    tstat=t/sqrt(Diff_Var)

    if(computeMMDs):
         Kxx = kernel(X, X)
         Kxxnd = Kxx-np.diag(np.diagonal(Kxx))
         u_xx=np.sum(Kxxnd)*( 1./(m*(m-1)) )
         MMDXY=u_xx+u_yy-2.*u_xy
         MMDXZ=u_xx+u_zz-2.*u_xz
    else:
         MMDXY=None
         MMDXZ=None
    return pvalue, tstat, sigma, MMDXY, MMDXZ
Example 79
Project: RGAN   Author: ratschlab   File: data_utils.py    MIT License 4 votes vote down vote up
def changepoint(seq_length=30, num_samples=28*5*100):
    """
    Generate data from two GPs, roughly speaking.
    The first part (up to m) is as a normal GP.
    The second part (m to end) has a linear downwards trend conditioned on the
    first part.

    Returns (samples, pdf, m_s): sequences of shape (num_samples, seq_length, 1),
    a partial of changepoint_pdf bound to the per-breakpoint covariances, and
    the sampled breakpoints m_s.
    """
    print('Generating samples from changepoint...')
    T = np.arange(seq_length)
    # sample breakpoint from latter half of sequence
    m_s = np.random.choice(np.arange(int(seq_length/2), seq_length-1), size=num_samples)
    samples = np.zeros(shape=(num_samples, seq_length, 1))
    # kernel parameters and stuff
    gamma=5.0/seq_length
    A = 0.01
    sigmasq = 0.8*A
    lamb = 0.0  # if non-zero, cov_M risks not being positive semidefinite...
    kernel = partial(rbf_kernel, gamma=gamma)
    # multiple values per m
    # Precompute, for each possible breakpoint m, the first-segment covariance
    # (cov_m) and the conditioned second-segment covariance (cov_M).
    # NOTE(review): N_ms and pdfs are populated nowhere below — apparently dead.
    N_ms = []
    cov_ms = []
    cov_Ms = []
    pdfs = []
    for m in range(int(seq_length/2), seq_length-1):
        # first part
        M = seq_length - m
        T_m = T[:m].reshape(m, 1)
        cov_m = A*kernel(T_m.reshape(-1, 1), T_m.reshape(-1, 1))
        cov_ms.append(cov_m)
        # the second part
        T_M = T[m:].reshape(M, 1)
        cov_mM = kernel(T_M.reshape(-1, 1), T_m.reshape(-1, 1))
        cov_M = sigmasq*(np.eye(M) - lamb*np.dot(np.dot(cov_mM, np.linalg.inv(cov_m)), cov_mM.T))
        cov_Ms.append(cov_M)
    for n in range(num_samples):
        m = m_s[n]
        M = seq_length-m
        # sample the first m
        cov_m = cov_ms[m - int(seq_length/2)]
        Xm = multivariate_normal.rvs(cov=cov_m)
        # generate mean function for second
        Xmin = np.min(Xm)
        initial_val = Xm[-1]
        if Xmin > 1:
            final_val = (1.0 - M/seq_length)*Xmin
        else:
            final_val = (1.0 + M/seq_length)*Xmin
        mu_M = np.linspace(initial_val, final_val, M)
        # sample the rest
        cov_M = cov_Ms[m -int(seq_length/2)]
        XM = multivariate_normal.rvs(mean=mu_M, cov=cov_M)
        # combine the sequence
        # NOTE: just one dimension
        samples[n, :, 0] = np.concatenate([Xm, XM])
    pdf = partial(changepoint_pdf, cov_ms=cov_ms, cov_Ms=cov_Ms)
    return samples, pdf, m_s 
Example 80
Project: Weiss   Author: WangWenjun559   File: test_pairwise.py    Apache License 2.0 4 votes vote down vote up
def test_pairwise_kernels():
    """pairwise_kernels must match the underlying kernel functions.

    Checks every metric registered in PAIRWISE_KERNEL_FUNCTIONS against dense
    arrays, tuples-of-tuples and CSR-sparse input (chi2 kernels reject sparse
    input), then a user-supplied callable metric with keyword arguments.
    """
    # Test the pairwise_kernels helper function.

    rng = np.random.RandomState(0)
    X = rng.random_sample((5, 4))
    Y = rng.random_sample((2, 4))
    # Test with all metrics that should be in PAIRWISE_KERNEL_FUNCTIONS.
    test_metrics = ["rbf", "sigmoid", "polynomial", "linear", "chi2",
                    "additive_chi2"]
    for metric in test_metrics:
        function = PAIRWISE_KERNEL_FUNCTIONS[metric]
        # Test with Y=None
        K1 = pairwise_kernels(X, metric=metric)
        K2 = function(X)
        assert_array_almost_equal(K1, K2)
        # Test with Y=Y
        K1 = pairwise_kernels(X, Y=Y, metric=metric)
        K2 = function(X, Y=Y)
        assert_array_almost_equal(K1, K2)
        # Test with tuples as X and Y
        X_tuples = tuple([tuple([v for v in row]) for row in X])
        Y_tuples = tuple([tuple([v for v in row]) for row in Y])
        K2 = pairwise_kernels(X_tuples, Y_tuples, metric=metric)
        assert_array_almost_equal(K1, K2)

        # Test with sparse X and Y
        X_sparse = csr_matrix(X)
        Y_sparse = csr_matrix(Y)
        if metric in ["chi2", "additive_chi2"]:
            # these don't support sparse matrices yet
            assert_raises(ValueError, pairwise_kernels,
                          X_sparse, Y=Y_sparse, metric=metric)
            continue
        K1 = pairwise_kernels(X_sparse, Y=Y_sparse, metric=metric)
        assert_array_almost_equal(K1, K2)
    # Test with a callable function, with given keywords.
    metric = callable_rbf_kernel
    kwds = {}
    kwds['gamma'] = 0.1
    K1 = pairwise_kernels(X, Y=Y, metric=metric, **kwds)
    K2 = rbf_kernel(X, Y=Y, **kwds)
    assert_array_almost_equal(K1, K2)

    # callable function, X=Y
    K1 = pairwise_kernels(X, Y=X, metric=metric, **kwds)
    K2 = rbf_kernel(X, Y=X, **kwds)
    assert_array_almost_equal(K1, K2)