Python sklearn.metrics.pairwise.rbf_kernel() Examples

The following are 30 code examples of sklearn.metrics.pairwise.rbf_kernel(), collected from open-source projects. You can go to the original project or source file by following the links above each example. You may also want to check out all available functions and classes of the module sklearn.metrics.pairwise, or try the search function.
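Before the project examples, here is a minimal usage sketch (not taken from any of the projects below). rbf_kernel(X, Y, gamma) returns the matrix of Gaussian kernel values K(x, y) = exp(-gamma * ||x - y||^2) for every pair of rows in X and Y; if gamma is None it defaults to 1 / n_features.

import numpy as np
from sklearn.metrics.pairwise import rbf_kernel

rng = np.random.RandomState(0)
X = rng.random_sample((5, 4))   # 5 samples, 4 features
Y = rng.random_sample((3, 4))   # 3 samples, 4 features

# Pairwise Gaussian kernel matrix of shape (5, 3)
K = rbf_kernel(X, Y, gamma=0.5)

# Agrees with the explicit formula exp(-gamma * ||x - y||^2)
diff = X[0] - Y[0]
assert np.isclose(K[0, 0], np.exp(-0.5 * np.dot(diff, diff)))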
Example #1
Source File: komd.py    From MKLpy with GNU General Public License v3.0
def __kernel_definition__(self):
        """Select the kernel function
        
        Returns
        -------
        kernel : a callable relative to selected kernel
        """
        if hasattr(self.kernel, '__call__'):
            return self.kernel
        if self.kernel == 'rbf' or self.kernel is None:
            return lambda X,Y : rbf_kernel(X,Y,self.rbf_gamma)
        if self.kernel == 'poly':
            return lambda X,Y : polynomial_kernel(X, Y, degree=self.degree, gamma=self.rbf_gamma, coef0=self.coef0)
        if self.kernel == 'linear':
            return lambda X,Y : linear_kernel(X,Y)
        if self.kernel == 'precomputed':
            return lambda X,Y : X 
Example #2
Source File: test_kernel_approximation.py    From twitter-stock-recommendation with MIT License
def test_nystroem_default_parameters():
    rnd = np.random.RandomState(42)
    X = rnd.uniform(size=(10, 4))

    # rbf kernel should behave as gamma=None by default
    # aka gamma = 1 / n_features
    nystroem = Nystroem(n_components=10)
    X_transformed = nystroem.fit_transform(X)
    K = rbf_kernel(X, gamma=None)
    K2 = np.dot(X_transformed, X_transformed.T)
    assert_array_almost_equal(K, K2)

    # chi2 kernel should behave as gamma=1 by default
    nystroem = Nystroem(kernel='chi2', n_components=10)
    X_transformed = nystroem.fit_transform(X)
    K = chi2_kernel(X, gamma=1)
    K2 = np.dot(X_transformed, X_transformed.T)
    assert_array_almost_equal(K, K2) 
Example #3
Source File: test_kernel_approximation.py    From twitter-stock-recommendation with MIT License
def test_rbf_sampler():
    # test that RBFSampler approximates kernel on random data
    # compute exact kernel
    gamma = 10.
    kernel = rbf_kernel(X, Y, gamma=gamma)

    # approximate kernel mapping
    rbf_transform = RBFSampler(gamma=gamma, n_components=1000, random_state=42)
    X_trans = rbf_transform.fit_transform(X)
    Y_trans = rbf_transform.transform(Y)
    kernel_approx = np.dot(X_trans, Y_trans.T)

    error = kernel - kernel_approx
    assert_less_equal(np.abs(np.mean(error)), 0.01)  # close to unbiased
    np.abs(error, out=error)
    assert_less_equal(np.max(error), 0.1)  # nothing too far off
    assert_less_equal(np.mean(error), 0.05)  # mean is fairly close 
Example #4
Source File: test_spectral_embedding.py    From twitter-stock-recommendation with MIT License
def test_spectral_embedding_unnormalized():
    # Test that spectral_embedding is also processing unnormalized laplacian
    # correctly
    random_state = np.random.RandomState(36)
    data = random_state.randn(10, 30)
    sims = rbf_kernel(data)
    n_components = 8
    embedding_1 = spectral_embedding(sims,
                                     norm_laplacian=False,
                                     n_components=n_components,
                                     drop_first=False)

    # Verify using manual computation with dense eigh
    laplacian, dd = sparse.csgraph.laplacian(sims, normed=False,
                                             return_diag=True)
    _, diffusion_map = eigh(laplacian)
    embedding_2 = diffusion_map.T[:n_components] * dd
    embedding_2 = _deterministic_vector_sign_flip(embedding_2).T

    assert_array_almost_equal(embedding_1, embedding_2) 
Example #5
Source File: test_spectral_embedding.py    From twitter-stock-recommendation with MIT License
def test_spectral_embedding_callable_affinity(seed=36):
    # Test spectral embedding with callable affinity
    gamma = 0.9
    kern = rbf_kernel(S, gamma=gamma)
    se_callable = SpectralEmbedding(n_components=2,
                                    affinity=(
                                        lambda x: rbf_kernel(x, gamma=gamma)),
                                    gamma=gamma,
                                    random_state=np.random.RandomState(seed))
    se_rbf = SpectralEmbedding(n_components=2, affinity="rbf",
                               gamma=gamma,
                               random_state=np.random.RandomState(seed))
    embed_rbf = se_rbf.fit_transform(S)
    embed_callable = se_callable.fit_transform(S)
    assert_array_almost_equal(
        se_callable.affinity_matrix_, se_rbf.affinity_matrix_)
    assert_array_almost_equal(kern, se_rbf.affinity_matrix_)
    assert_true(
        _check_with_col_sign_flipping(embed_rbf, embed_callable, 0.05)) 
Example #6
Source File: test_coreg.py    From mvlearn with Apache License 2.0
def test_affinity_mat_rbf2(data):

    v1_data = data['fit_data'][0]
    gamma = 1
    spectral = MultiviewCoRegSpectralClustering(random_state=RANDOM_STATE,
                                           gamma=gamma)
    distances = cdist(v1_data, v1_data)
    gamma = 1 / (2 * np.median(distances) ** 2)
    true_kernel = rbf_kernel(v1_data, gamma=1)
    g_kernel = spectral._affinity_mat(v1_data)

    assert(g_kernel.shape[0] == data['n_fit'])
    assert(g_kernel.shape[1] == data['n_fit'])

    for ind1 in range(g_kernel.shape[0]):
        for ind2 in range(g_kernel.shape[1]):
            assert np.abs(true_kernel[ind1][ind2]
                          - g_kernel[ind1][ind2]) < 0.000001 
Example #7
Source File: test_coreg.py    From mvlearn with Apache License 2.0
def test_affinity_mat_rbf(data):
        
    v1_data = data['fit_data'][0]
    spectral = data['spectral']

    distances = cdist(v1_data, v1_data)
    gamma = 1 / (2 * np.median(distances) ** 2)
    true_kernel = rbf_kernel(v1_data, gamma=gamma)
    g_kernel = spectral._affinity_mat(v1_data)

    assert(g_kernel.shape[0] == data['n_fit'])
    assert(g_kernel.shape[1] == data['n_fit'])

    for ind1 in range(g_kernel.shape[0]):
        for ind2 in range(g_kernel.shape[1]):
            assert np.abs(true_kernel[ind1][ind2]
                          - g_kernel[ind1][ind2]) < 0.000001 
Example #8
Source File: test_spectral.py    From mvlearn with Apache License 2.0
def test_compute_eigs(data):

    v1_data = data['fit_data'][0]
    g_kernel = rbf_kernel(v1_data, v1_data)
    n_clusts = data['n_clusters']
    n_fit = data['n_fit']

    spectral = data['spectral']
    eigs = spectral._compute_eigs(g_kernel)

    assert(eigs.shape[0] == n_fit)
    assert(eigs.shape[1] == n_clusts)

    col_mags = np.linalg.norm(eigs, axis=0)

    for val in col_mags:
        assert(np.abs(val - 1) < 0.000001) 
Example #9
Source File: test_spectral.py    From mvlearn with Apache License 2.0
def test_affinity_mat_rbf2(data):

    v1_data = data['fit_data'][0]
    gamma = 1
    spectral = MultiviewSpectralClustering(random_state=RANDOM_STATE,
                                           gamma=gamma)
    distances = cdist(v1_data, v1_data)
    gamma = 1 / (2 * np.median(distances) ** 2)
    true_kernel = rbf_kernel(v1_data, gamma=1)
    g_kernel = spectral._affinity_mat(v1_data)

    assert(g_kernel.shape[0] == data['n_fit'])
    assert(g_kernel.shape[1] == data['n_fit'])

    for ind1 in range(g_kernel.shape[0]):
        for ind2 in range(g_kernel.shape[1]):
            assert np.abs(true_kernel[ind1][ind2]
                          - g_kernel[ind1][ind2]) < 0.000001 
Example #10
Source File: test_spectral.py    From mvlearn with Apache License 2.0
def test_affinity_mat_rbf(data):
        
    v1_data = data['fit_data'][0]
    spectral = data['spectral']

    distances = cdist(v1_data, v1_data)
    gamma = 1 / (2 * np.median(distances) ** 2)
    true_kernel = rbf_kernel(v1_data, gamma=gamma)
    g_kernel = spectral._affinity_mat(v1_data)

    assert(g_kernel.shape[0] == data['n_fit'])
    assert(g_kernel.shape[1] == data['n_fit'])

    for ind1 in range(g_kernel.shape[0]):
        for ind2 in range(g_kernel.shape[1]):
            assert np.abs(true_kernel[ind1][ind2]
                          - g_kernel[ind1][ind2]) < 0.000001 
Example #11
Source File: _utils.py    From hyppo with Apache License 2.0
def gaussian(x, workers=None):
    """Default medial gaussian kernel similarity calculation"""
    l1 = pairwise_distances(X=x, metric="l1", n_jobs=workers)
    n = l1.shape[0]
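    # Median of the off-diagonal L1 distances: the strided view below drops
    # the zero diagonal of the (n, n) distance matrix before taking the median.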
    med = np.median(
        np.lib.stride_tricks.as_strided(
            l1, (n - 1, n + 1), (l1.itemsize * (n + 1), l1.itemsize)
        )[:, 1:]
    )
    # prevents division by zero when used on label vectors
    med = med if med else 1
    gamma = 1.0 / (2 * (med ** 2))
    return rbf_kernel(x, gamma=gamma)


Example #12
Source File: data_utils.py    From RGAN with MIT License
def GP(seq_length=30, num_samples=28*5*100, num_signals=1, scale=0.1, kernel='rbf', **kwargs):
    # the shape of the samples is num_samples x seq_length x num_signals
    samples = np.empty(shape=(num_samples, seq_length, num_signals))
    #T = np.arange(seq_length)/seq_length    # note, between 0 and 1
    T = np.arange(seq_length)    # note, not between 0 and 1
    if kernel == 'periodic':
        cov = periodic_kernel(T)
    elif kernel == 'rbf':
        cov = rbf_kernel(T.reshape(-1, 1), gamma=scale)
    else:
        raise NotImplementedError
    # scale the covariance
    cov *= 0.2
    # define the distribution
    mu = np.zeros(seq_length)
    print(np.linalg.det(cov))
    distribution = multivariate_normal(mean=np.zeros(cov.shape[0]), cov=cov)
    pdf = distribution.logpdf
    # now generate samples
    for i in range(num_signals):
        samples[:, :, i] = distribution.rvs(size=num_samples)
    return samples, pdf 
Example #13
Source File: classification_based.py    From kenchi with BSD 3-Clause "New" or "Revised" License
def _fit(self, X):
        self.estimator_  = OneClassSVM(
            cache_size   = self.cache_size,
            gamma        = self.gamma,
            max_iter     = self.max_iter,
            nu           = self.nu,
            shrinking    = self.shrinking,
            tol          = self.tol
        ).fit(X)

        l,               = self.support_.shape
        self.nu_l_       = self.nu * l

        Q                = rbf_kernel(
            self.support_vectors_, gamma=self.estimator_._gamma
        )
        c2               = (self.dual_coef_ @ Q @ self.dual_coef_.T)[0, 0]
        self.R2_         = c2 + 2. * self.intercept_[0] + 1.

        return self 
Example #14
Source File: test_kernel_pca.py    From Mastering-Elasticsearch-7.0 with MIT License
def test_gridsearch_pipeline_precomputed():
    # Test if we can do a grid-search to find parameters to separate
    # circles with a perceptron model using a precomputed kernel.
    X, y = make_circles(n_samples=400, factor=.3, noise=.05,
                        random_state=0)
    kpca = KernelPCA(kernel="precomputed", n_components=2)
    pipeline = Pipeline([("kernel_pca", kpca),
                         ("Perceptron", Perceptron(max_iter=5))])
    param_grid = dict(Perceptron__max_iter=np.arange(1, 5))
    grid_search = GridSearchCV(pipeline, cv=3, param_grid=param_grid)
    X_kernel = rbf_kernel(X, gamma=2.)
    grid_search.fit(X_kernel, y)
    assert_equal(grid_search.best_score_, 1)


Example #15
Source File: test_kernel_approximation.py    From Mastering-Elasticsearch-7.0 with MIT License
def test_nystroem_default_parameters():
    rnd = np.random.RandomState(42)
    X = rnd.uniform(size=(10, 4))

    # rbf kernel should behave as gamma=None by default
    # aka gamma = 1 / n_features
    nystroem = Nystroem(n_components=10)
    X_transformed = nystroem.fit_transform(X)
    K = rbf_kernel(X, gamma=None)
    K2 = np.dot(X_transformed, X_transformed.T)
    assert_array_almost_equal(K, K2)

    # chi2 kernel should behave as gamma=1 by default
    nystroem = Nystroem(kernel='chi2', n_components=10)
    X_transformed = nystroem.fit_transform(X)
    K = chi2_kernel(X, gamma=1)
    K2 = np.dot(X_transformed, X_transformed.T)
    assert_array_almost_equal(K, K2) 
Example #16
Source File: test_svm.py    From Mastering-Elasticsearch-7.0 with MIT License
def test_svr_predict():
    # Test SVR's decision_function
    # Sanity check, test that predict implemented in python
    # returns the same as the one in libsvm

    X = iris.data
    y = iris.target

    # linear kernel
    reg = svm.SVR(kernel='linear', C=0.1).fit(X, y)

    dec = np.dot(X, reg.coef_.T) + reg.intercept_
    assert_array_almost_equal(dec.ravel(), reg.predict(X).ravel())

    # rbf kernel
    reg = svm.SVR(kernel='rbf', gamma=1).fit(X, y)

    rbfs = rbf_kernel(X, reg.support_vectors_, gamma=reg.gamma)
    dec = np.dot(rbfs, reg.dual_coef_.T) + reg.intercept_
    assert_array_almost_equal(dec.ravel(), reg.predict(X).ravel()) 
Example #17
Source File: test_pairwise.py    From Mastering-Elasticsearch-7.0 with MIT License
def test_pairwise_kernels_callable():
    # Test the pairwise_kernels helper function
    # with a callable function, with given keywords.
    rng = np.random.RandomState(0)
    X = rng.random_sample((5, 4))
    Y = rng.random_sample((2, 4))

    metric = callable_rbf_kernel
    kwds = {'gamma': 0.1}
    K1 = pairwise_kernels(X, Y=Y, metric=metric, **kwds)
    K2 = rbf_kernel(X, Y=Y, **kwds)
    assert_array_almost_equal(K1, K2)

    # callable function, X=Y
    K1 = pairwise_kernels(X, Y=X, metric=metric, **kwds)
    K2 = rbf_kernel(X, Y=X, **kwds)
    assert_array_almost_equal(K1, K2) 
Example #18
Source File: test_spectral_embedding.py    From Mastering-Elasticsearch-7.0 with MIT License
def test_spectral_embedding_unnormalized():
    # Test that spectral_embedding is also processing unnormalized laplacian
    # correctly
    random_state = np.random.RandomState(36)
    data = random_state.randn(10, 30)
    sims = rbf_kernel(data)
    n_components = 8
    embedding_1 = spectral_embedding(sims,
                                     norm_laplacian=False,
                                     n_components=n_components,
                                     drop_first=False)

    # Verify using manual computation with dense eigh
    laplacian, dd = csgraph.laplacian(sims, normed=False,
                                      return_diag=True)
    _, diffusion_map = eigh(laplacian)
    embedding_2 = diffusion_map.T[:n_components]
    embedding_2 = _deterministic_vector_sign_flip(embedding_2).T

    assert_array_almost_equal(embedding_1, embedding_2) 
Example #19
Source File: kernels.py    From BrainSpace with BSD 3-Clause "New" or "Revised" License
def _build_kernel(x, kernel, gamma=None):

    if kernel in {'pearson', 'spearman'}:
        if kernel == 'spearman':
            x = np.apply_along_axis(rankdata, 1, x)
        return np.corrcoef(x)

    if kernel in {'cosine', 'normalized_angle'}:
        x = 1 - squareform(pdist(x, metric='cosine'))
        if kernel == 'normalized_angle':
            x = 1 - np.arccos(x, x)/np.pi
        return x

    if kernel == 'gaussian':
        if gamma is None:
            gamma = 1 / x.shape[1]
        return rbf_kernel(x, gamma=gamma)

    if callable(kernel):
        return kernel(x)

    raise ValueError("Unknown kernel '{0}'.".format(kernel)) 
Example #20
Source File: test_fastfood.py    From scikit-learn-extra with BSD 3-Clause "New" or "Revised" License
def test_fastfood():
    """test that Fastfood fast approximates kernel on random data"""
    # compute exact kernel
    gamma = 10.0
    kernel = rbf_kernel(X, Y, gamma=gamma)

    sigma = np.sqrt(1 / (2 * gamma))

    # approximate kernel mapping
    ff_transform = Fastfood(sigma, n_components=1000, random_state=42)

    pars = ff_transform.fit(X)
    X_trans = pars.transform(X)
    Y_trans = ff_transform.transform(Y)

    kernel_approx = np.dot(X_trans, Y_trans.T)

    print("approximation:", kernel_approx[:5, :5])
    print("true kernel:", kernel[:5, :5])
    assert_array_almost_equal(kernel, kernel_approx, decimal=1) 
Example #21
Source File: test_spectral_embedding.py    From Mastering-Elasticsearch-7.0 with MIT License
def test_spectral_embedding_first_eigen_vector():
    # Test that the first eigenvector of spectral_embedding
    # is constant and that the second is not (for a connected graph)
    random_state = np.random.RandomState(36)
    data = random_state.randn(10, 30)
    sims = rbf_kernel(data)
    n_components = 2

    for seed in range(10):
        embedding = spectral_embedding(sims,
                                       norm_laplacian=False,
                                       n_components=n_components,
                                       drop_first=False,
                                       random_state=seed)

        assert np.std(embedding[:, 0]) == pytest.approx(0)
        assert np.std(embedding[:, 1]) > 1e-3 
Example #22
Source File: test_pairwise.py    From twitter-stock-recommendation with MIT License
def test_kernel_symmetry():
    # Valid kernels should be symmetric
    rng = np.random.RandomState(0)
    X = rng.random_sample((5, 4))
    for kernel in (linear_kernel, polynomial_kernel, rbf_kernel,
                   laplacian_kernel, sigmoid_kernel, cosine_similarity):
        K = kernel(X, X)
        assert_array_almost_equal(K, K.T, 15) 
Example #23
Source File: test_spectral_embedding.py    From Mastering-Elasticsearch-7.0 with MIT License
def test_spectral_embedding_precomputed_affinity(seed=36):
    # Test spectral embedding with precomputed kernel
    gamma = 1.0
    se_precomp = SpectralEmbedding(n_components=2, affinity="precomputed",
                                   random_state=np.random.RandomState(seed))
    se_rbf = SpectralEmbedding(n_components=2, affinity="rbf",
                               gamma=gamma,
                               random_state=np.random.RandomState(seed))
    embed_precomp = se_precomp.fit_transform(rbf_kernel(S, gamma=gamma))
    embed_rbf = se_rbf.fit_transform(S)
    assert_array_almost_equal(
        se_precomp.affinity_matrix_, se_rbf.affinity_matrix_)
    assert _check_with_col_sign_flipping(embed_precomp, embed_rbf, 0.05) 
Example #24
Source File: test_pairwise.py    From Mastering-Elasticsearch-7.0 with MIT License
def test_rbf_kernel():
    rng = np.random.RandomState(0)
    X = rng.random_sample((5, 4))
    K = rbf_kernel(X, X)
    # the diagonal elements of a rbf kernel are 1
    assert_array_almost_equal(K.flat[::6], np.ones(5)) 
Example #25
Source File: test_pairwise.py    From Mastering-Elasticsearch-7.0 with MIT License
def test_pairwise_kernels_filter_param():
    rng = np.random.RandomState(0)
    X = rng.random_sample((5, 4))
    Y = rng.random_sample((2, 4))
    K = rbf_kernel(X, Y, gamma=0.1)
    params = {"gamma": 0.1, "blabla": ":)"}
    K2 = pairwise_kernels(X, Y, metric="rbf", filter_params=True, **params)
    assert_array_almost_equal(K, K2)

    assert_raises(TypeError, pairwise_kernels, X, Y, "rbf", **params) 
Example #26
Source File: test_spectral.py    From twitter-stock-recommendation with MIT License
def test_spectral_clustering_sparse():
    X, y = make_blobs(n_samples=20, random_state=0,
                      centers=[[1, 1], [-1, -1]], cluster_std=0.01)

    S = rbf_kernel(X, gamma=1)
    S = np.maximum(S - 1e-4, 0)
    S = sparse.coo_matrix(S)

    labels = SpectralClustering(random_state=0, n_clusters=2,
                                affinity='precomputed').fit(S).labels_
    assert_equal(adjusted_rand_score(y, labels), 1) 
Example #27
Source File: test_pairwise.py    From Mastering-Elasticsearch-7.0 with MIT License
def callable_rbf_kernel(x, y, **kwds):
    # Callable version of pairwise.rbf_kernel.
    K = rbf_kernel(np.atleast_2d(x), np.atleast_2d(y), **kwds)
    return K 
Example #28
Source File: mv_spectral.py    From mvlearn with Apache License 2.0
def _affinity_mat(self, X):

        r'''
        Computes the affinity matrix based on the selected
        kernel type.

        Parameters
        ----------
        X : array-like, shape (n_samples, n_features)
            The data matrix from which we will compute the
            affinity matrix.

        Returns
        -------
        sims : array-like, shape (n_samples, n_samples)
            The resulting affinity kernel.

        '''

        sims = None

        # If gamma is None, then compute default gamma value for this view
        gamma = self.gamma
        if self.gamma is None:
            distances = cdist(X, X)
            gamma = 1 / (2 * np.median(distances) ** 2)
        # Produce the affinity matrix based on the selected kernel type
        if self.affinity == 'rbf':
            sims = rbf_kernel(X, gamma=gamma)
        elif self.affinity == 'nearest_neighbors':
            neighbor = NearestNeighbors(n_neighbors=self.n_neighbors)
            neighbor.fit(X)
            sims = neighbor.kneighbors_graph(X).toarray()
        else:
            sims = polynomial_kernel(X, gamma=gamma)

        return sims 
Example #29
Source File: test_pairwise.py    From twitter-stock-recommendation with MIT License
def callable_rbf_kernel(x, y, **kwds):
    # Callable version of pairwise.rbf_kernel.
    K = rbf_kernel(np.atleast_2d(x), np.atleast_2d(y), **kwds)
    return K 
Example #30
Source File: test_pairwise.py    From twitter-stock-recommendation with MIT License
def test_pairwise_kernels_filter_param():
    rng = np.random.RandomState(0)
    X = rng.random_sample((5, 4))
    Y = rng.random_sample((2, 4))
    K = rbf_kernel(X, Y, gamma=0.1)
    params = {"gamma": 0.1, "blabla": ":)"}
    K2 = pairwise_kernels(X, Y, metric="rbf", filter_params=True, **params)
    assert_array_almost_equal(K, K2)

    assert_raises(TypeError, pairwise_kernels, X, Y, "rbf", **params)