Python sklearn.metrics.pairwise.rbf_kernel() Examples

The following are 30 code examples of sklearn.metrics.pairwise.rbf_kernel(), collected from open-source projects; each example's header lists its source project, author, file, and license. You may also want to check out the other available functions and classes of the sklearn.metrics.pairwise module.
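For reference, rbf_kernel(X, Y=None, gamma=None) returns the matrix K with K[i, j] = exp(-gamma * ||X[i] - Y[j]||**2), where gamma defaults to 1 / n_features. A minimal, self-contained usage sketch (the shapes and gamma value below are chosen purely for illustration):

import numpy as np
from sklearn.metrics.pairwise import euclidean_distances, rbf_kernel

rng = np.random.RandomState(0)
X = rng.random_sample((5, 4))  # 5 samples, 4 features
Y = rng.random_sample((3, 4))  # 3 samples, 4 features

K = rbf_kernel(X, Y, gamma=0.5)  # kernel matrix of shape (5, 3)

# equivalent manual computation from squared Euclidean distances
D2 = euclidean_distances(X, Y, squared=True)
np.testing.assert_allclose(K, np.exp(-0.5 * D2))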
Example #1
Source Project: MKLpy   Author: IvanoLauriola   File: komd.py    License: GNU General Public License v3.0
def __kernel_definition__(self):
        """Select the kernel function
        
        Returns
        -------
        kernel : a callable relative to selected kernel
        """
        if hasattr(self.kernel, '__call__'):
            return self.kernel
        if self.kernel == 'rbf' or self.kernel is None:
            return lambda X,Y : rbf_kernel(X,Y,self.rbf_gamma)
        if self.kernel == 'poly':
            return lambda X,Y : polynomial_kernel(X, Y, degree=self.degree, gamma=self.rbf_gamma, coef0=self.coef0)
        if self.kernel == 'linear':
            return lambda X,Y : linear_kernel(X,Y)
        if self.kernel == 'precomputed':
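            # 'precomputed': X is already the Gram matrix, so return it unchanged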
            return lambda X,Y : X 
Example #2
Source Project: scikit-learn-extra   Author: scikit-learn-contrib   File: test_fastfood.py    License: BSD 3-Clause "New" or "Revised" License
def test_fastfood():
    """test that Fastfood fast approximates kernel on random data"""
    # compute exact kernel
    gamma = 10.0
    kernel = rbf_kernel(X, Y, gamma=gamma)

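    # Fastfood is parameterized by sigma; gamma = 1 / (2 * sigma**2), since
    # exp(-gamma * ||x - y||**2) == exp(-||x - y||**2 / (2 * sigma**2))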
    sigma = np.sqrt(1 / (2 * gamma))

    # approximate kernel mapping
    ff_transform = Fastfood(sigma, n_components=1000, random_state=42)

    pars = ff_transform.fit(X)
    X_trans = pars.transform(X)
    Y_trans = ff_transform.transform(Y)

    kernel_approx = np.dot(X_trans, Y_trans.T)

    print("approximation:", kernel_approx[:5, :5])
    print("true kernel:", kernel[:5, :5])
    assert_array_almost_equal(kernel, kernel_approx, decimal=1) 
Example #3
Source Project: BrainSpace   Author: MICA-MNI   File: kernels.py    License: BSD 3-Clause "New" or "Revised" License
def _build_kernel(x, kernel, gamma=None):

    if kernel in {'pearson', 'spearman'}:
        if kernel == 'spearman':
            x = np.apply_along_axis(rankdata, 1, x)
        return np.corrcoef(x)

    if kernel in {'cosine', 'normalized_angle'}:
        x = 1 - squareform(pdist(x, metric='cosine'))
        if kernel == 'normalized_angle':
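            # map cosine similarity to normalized angle: 1 - arccos(sim) / pi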
            x = 1 - np.arccos(x, out=x) / np.pi
        return x

    if kernel == 'gaussian':
        if gamma is None:
            gamma = 1 / x.shape[1]
        return rbf_kernel(x, gamma=gamma)

    if callable(kernel):
        return kernel(x)

    raise ValueError("Unknown kernel '{0}'.".format(kernel)) 
Example #4
Source Project: Mastering-Elasticsearch-7.0   Author: PacktPublishing   File: test_pairwise.py    License: MIT License
def test_pairwise_kernels_callable():
    # Test the pairwise_kernels helper function
    # with a callable function, with given keywords.
    rng = np.random.RandomState(0)
    X = rng.random_sample((5, 4))
    Y = rng.random_sample((2, 4))

    metric = callable_rbf_kernel
    kwds = {'gamma': 0.1}
    K1 = pairwise_kernels(X, Y=Y, metric=metric, **kwds)
    K2 = rbf_kernel(X, Y=Y, **kwds)
    assert_array_almost_equal(K1, K2)

    # callable function, X=Y
    K1 = pairwise_kernels(X, Y=X, metric=metric, **kwds)
    K2 = rbf_kernel(X, Y=X, **kwds)
    assert_array_almost_equal(K1, K2) 
Example #5
Source Project: Mastering-Elasticsearch-7.0   Author: PacktPublishing   File: test_spectral_embedding.py    License: MIT License
def test_spectral_embedding_unnormalized():
    # Test that spectral_embedding is also processing unnormalized laplacian
    # correctly
    random_state = np.random.RandomState(36)
    data = random_state.randn(10, 30)
    sims = rbf_kernel(data)
    n_components = 8
    embedding_1 = spectral_embedding(sims,
                                     norm_laplacian=False,
                                     n_components=n_components,
                                     drop_first=False)

    # Verify using manual computation with dense eigh
    laplacian, dd = csgraph.laplacian(sims, normed=False,
                                      return_diag=True)
    _, diffusion_map = eigh(laplacian)
    embedding_2 = diffusion_map.T[:n_components]
    embedding_2 = _deterministic_vector_sign_flip(embedding_2).T

    assert_array_almost_equal(embedding_1, embedding_2) 
Example #6
Source Project: Mastering-Elasticsearch-7.0   Author: PacktPublishing   File: test_spectral_embedding.py    License: MIT License
def test_spectral_embedding_first_eigen_vector():
    # Test that the first eigenvector of spectral_embedding
    # is constant and that the second is not (for a connected graph)
    random_state = np.random.RandomState(36)
    data = random_state.randn(10, 30)
    sims = rbf_kernel(data)
    n_components = 2

    for seed in range(10):
        embedding = spectral_embedding(sims,
                                       norm_laplacian=False,
                                       n_components=n_components,
                                       drop_first=False,
                                       random_state=seed)

        assert np.std(embedding[:, 0]) == pytest.approx(0)
        assert np.std(embedding[:, 1]) > 1e-3 
Example #7
Source Project: Mastering-Elasticsearch-7.0   Author: PacktPublishing   File: test_svm.py    License: MIT License
def test_svr_predict():
    # Test SVR's decision_function
    # Sanity check, test that predict implemented in python
    # returns the same as the one in libsvm

    X = iris.data
    y = iris.target

    # linear kernel
    reg = svm.SVR(kernel='linear', C=0.1).fit(X, y)

    dec = np.dot(X, reg.coef_.T) + reg.intercept_
    assert_array_almost_equal(dec.ravel(), reg.predict(X).ravel())

    # rbf kernel
    reg = svm.SVR(kernel='rbf', gamma=1).fit(X, y)

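    # manual decision function: K(X, support_vectors_) @ dual_coef_.T + intercept_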
    rbfs = rbf_kernel(X, reg.support_vectors_, gamma=reg.gamma)
    dec = np.dot(rbfs, reg.dual_coef_.T) + reg.intercept_
    assert_array_almost_equal(dec.ravel(), reg.predict(X).ravel()) 
Example #8
Source Project: Mastering-Elasticsearch-7.0   Author: PacktPublishing   File: test_kernel_approximation.py    License: MIT License
def test_nystroem_default_parameters():
    rnd = np.random.RandomState(42)
    X = rnd.uniform(size=(10, 4))

    # rbf kernel should behave as gamma=None by default
    # aka gamma = 1 / n_features
    nystroem = Nystroem(n_components=10)
    X_transformed = nystroem.fit_transform(X)
    K = rbf_kernel(X, gamma=None)
    K2 = np.dot(X_transformed, X_transformed.T)
    assert_array_almost_equal(K, K2)

    # chi2 kernel should behave as gamma=1 by default
    nystroem = Nystroem(kernel='chi2', n_components=10)
    X_transformed = nystroem.fit_transform(X)
    K = chi2_kernel(X, gamma=1)
    K2 = np.dot(X_transformed, X_transformed.T)
    assert_array_almost_equal(K, K2) 
Example #9
Source Project: Mastering-Elasticsearch-7.0   Author: PacktPublishing   File: test_kernel_pca.py    License: MIT License
def test_gridsearch_pipeline_precomputed():
    # Test if we can do a grid-search to find parameters to separate
    # circles with a perceptron model using a precomputed kernel.
    X, y = make_circles(n_samples=400, factor=.3, noise=.05,
                        random_state=0)
    kpca = KernelPCA(kernel="precomputed", n_components=2)
    pipeline = Pipeline([("kernel_pca", kpca),
                         ("Perceptron", Perceptron(max_iter=5))])
    param_grid = dict(Perceptron__max_iter=np.arange(1, 5))
    grid_search = GridSearchCV(pipeline, cv=3, param_grid=param_grid)
    X_kernel = rbf_kernel(X, gamma=2.)
    grid_search.fit(X_kernel, y)
    assert_equal(grid_search.best_score_, 1)


Example #10
Source Project: kenchi   Author: Y-oHr-N   File: classification_based.py    License: BSD 3-Clause "New" or "Revised" License
def _fit(self, X):
        self.estimator_  = OneClassSVM(
            cache_size   = self.cache_size,
            gamma        = self.gamma,
            max_iter     = self.max_iter,
            nu           = self.nu,
            shrinking    = self.shrinking,
            tol          = self.tol
        ).fit(X)

        l,               = self.support_.shape
        self.nu_l_       = self.nu * l

        Q                = rbf_kernel(
            self.support_vectors_, gamma=self.estimator_._gamma
        )
        c2               = (self.dual_coef_ @ Q @ self.dual_coef_.T)[0, 0]
        self.R2_         = c2 + 2. * self.intercept_[0] + 1.

        return self 
Example #11
Source Project: RGAN   Author: ratschlab   File: data_utils.py    License: MIT License
def GP(seq_length=30, num_samples=28*5*100, num_signals=1, scale=0.1, kernel='rbf', **kwargs):
    # the shape of the samples is num_samples x seq_length x num_signals
    samples = np.empty(shape=(num_samples, seq_length, num_signals))
    #T = np.arange(seq_length)/seq_length    # note, between 0 and 1
    T = np.arange(seq_length)    # note, not between 0 and 1
    if kernel == 'periodic':
        cov = periodic_kernel(T)
    elif kernel == 'rbf':
        cov = rbf_kernel(T.reshape(-1, 1), gamma=scale)
    else:
        raise NotImplementedError
    # scale the covariance
    cov *= 0.2
    # define the distribution
    print(np.linalg.det(cov))
    distribution = multivariate_normal(mean=np.zeros(cov.shape[0]), cov=cov)
    pdf = distribution.logpdf
    # now generate samples
    for i in range(num_signals):
        samples[:, :, i] = distribution.rvs(size=num_samples)
    return samples, pdf 
Example #12
Source Project: hyppo   Author: neurodata   File: _utils.py    License: Apache License 2.0
def gaussian(x, workers=None):
    """Default medial gaussian kernel similarity calculation"""
    l1 = pairwise_distances(X=x, metric="l1", n_jobs=workers)
    n = l1.shape[0]
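    # strided view of the (n x n) distance matrix that drops the zero
    # diagonal, so the median is taken over off-diagonal distances only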
    med = np.median(
        np.lib.stride_tricks.as_strided(
            l1, (n - 1, n + 1), (l1.itemsize * (n + 1), l1.itemsize)
        )[:, 1:]
    )
    # prevents division by zero when used on label vectors
    med = med if med else 1
    gamma = 1.0 / (2 * (med ** 2))
    return rbf_kernel(x, gamma=gamma)


Example #13
Source Project: mvlearn   Author: neurodata   File: test_spectral.py    License: Apache License 2.0
def test_affinity_mat_rbf(data):
        
    v1_data = data['fit_data'][0]
    spectral = data['spectral']

    distances = cdist(v1_data, v1_data)
    gamma = 1 / (2 * np.median(distances) ** 2)
    true_kernel = rbf_kernel(v1_data, gamma=gamma)
    g_kernel = spectral._affinity_mat(v1_data)

    assert(g_kernel.shape[0] == data['n_fit'])
    assert(g_kernel.shape[1] == data['n_fit'])

    for ind1 in range(g_kernel.shape[0]):
        for ind2 in range(g_kernel.shape[1]):
            assert np.abs(true_kernel[ind1][ind2]
                          - g_kernel[ind1][ind2]) < 0.000001 
Example #14
Source Project: mvlearn   Author: neurodata   File: test_spectral.py    License: Apache License 2.0
def test_affinity_mat_rbf2(data):

    v1_data = data['fit_data'][0]
    gamma = 1
    spectral = MultiviewSpectralClustering(random_state=RANDOM_STATE,
                                           gamma=gamma)
    true_kernel = rbf_kernel(v1_data, gamma=1)
    g_kernel = spectral._affinity_mat(v1_data)

    assert(g_kernel.shape[0] == data['n_fit'])
    assert(g_kernel.shape[1] == data['n_fit'])

    for ind1 in range(g_kernel.shape[0]):
        for ind2 in range(g_kernel.shape[1]):
            assert np.abs(true_kernel[ind1][ind2]
                          - g_kernel[ind1][ind2]) < 0.000001 
Example #15
Source Project: mvlearn   Author: neurodata   File: test_spectral.py    License: Apache License 2.0
def test_compute_eigs(data):

    v1_data = data['fit_data'][0]
    g_kernel = rbf_kernel(v1_data, v1_data)
    n_clusts = data['n_clusters']
    n_fit = data['n_fit']

    spectral = data['spectral']
    eigs = spectral._compute_eigs(g_kernel)

    assert(eigs.shape[0] == n_fit)
    assert(eigs.shape[1] == n_clusts)

    col_mags = np.linalg.norm(eigs, axis=0)

    for val in col_mags:
        assert(np.abs(val - 1) < 0.000001) 
Example #16
Source Project: mvlearn   Author: neurodata   File: test_coreg.py    License: Apache License 2.0
def test_affinity_mat_rbf(data):
        
    v1_data = data['fit_data'][0]
    spectral = data['spectral']

    distances = cdist(v1_data, v1_data)
    gamma = 1 / (2 * np.median(distances) ** 2)
    true_kernel = rbf_kernel(v1_data, gamma=gamma)
    g_kernel = spectral._affinity_mat(v1_data)

    assert(g_kernel.shape[0] == data['n_fit'])
    assert(g_kernel.shape[1] == data['n_fit'])

    for ind1 in range(g_kernel.shape[0]):
        for ind2 in range(g_kernel.shape[1]):
            assert np.abs(true_kernel[ind1][ind2]
                          - g_kernel[ind1][ind2]) < 0.000001 
Example #17
Source Project: mvlearn   Author: neurodata   File: test_coreg.py    License: Apache License 2.0
def test_affinity_mat_rbf2(data):

    v1_data = data['fit_data'][0]
    gamma = 1
    spectral = MultiviewCoRegSpectralClustering(random_state=RANDOM_STATE,
                                           gamma=gamma)
    true_kernel = rbf_kernel(v1_data, gamma=1)
    g_kernel = spectral._affinity_mat(v1_data)

    assert(g_kernel.shape[0] == data['n_fit'])
    assert(g_kernel.shape[1] == data['n_fit'])

    for ind1 in range(g_kernel.shape[0]):
        for ind2 in range(g_kernel.shape[1]):
            assert np.abs(true_kernel[ind1][ind2]
                          - g_kernel[ind1][ind2]) < 0.000001 
Example #18
Source Project: twitter-stock-recommendation   Author: alvarobartt   File: test_spectral_embedding.py    License: MIT License
def test_spectral_embedding_callable_affinity(seed=36):
    # Test spectral embedding with callable affinity
    gamma = 0.9
    kern = rbf_kernel(S, gamma=gamma)
    se_callable = SpectralEmbedding(n_components=2,
                                    affinity=(
                                        lambda x: rbf_kernel(x, gamma=gamma)),
                                    gamma=gamma,
                                    random_state=np.random.RandomState(seed))
    se_rbf = SpectralEmbedding(n_components=2, affinity="rbf",
                               gamma=gamma,
                               random_state=np.random.RandomState(seed))
    embed_rbf = se_rbf.fit_transform(S)
    embed_callable = se_callable.fit_transform(S)
    assert_array_almost_equal(
        se_callable.affinity_matrix_, se_rbf.affinity_matrix_)
    assert_array_almost_equal(kern, se_rbf.affinity_matrix_)
    assert_true(
        _check_with_col_sign_flipping(embed_rbf, embed_callable, 0.05)) 
Example #19
Source Project: twitter-stock-recommendation   Author: alvarobartt   File: test_spectral_embedding.py    License: MIT License
def test_spectral_embedding_unnormalized():
    # Test that spectral_embedding is also processing unnormalized laplacian
    # correctly
    random_state = np.random.RandomState(36)
    data = random_state.randn(10, 30)
    sims = rbf_kernel(data)
    n_components = 8
    embedding_1 = spectral_embedding(sims,
                                     norm_laplacian=False,
                                     n_components=n_components,
                                     drop_first=False)

    # Verify using manual computation with dense eigh
    laplacian, dd = sparse.csgraph.laplacian(sims, normed=False,
                                             return_diag=True)
    _, diffusion_map = eigh(laplacian)
    embedding_2 = diffusion_map.T[:n_components] * dd
    embedding_2 = _deterministic_vector_sign_flip(embedding_2).T

    assert_array_almost_equal(embedding_1, embedding_2) 
Example #20
Source Project: twitter-stock-recommendation   Author: alvarobartt   File: test_kernel_approximation.py    License: MIT License
def test_rbf_sampler():
    # test that RBFSampler approximates kernel on random data
    # compute exact kernel
    gamma = 10.
    kernel = rbf_kernel(X, Y, gamma=gamma)

    # approximate kernel mapping
    rbf_transform = RBFSampler(gamma=gamma, n_components=1000, random_state=42)
    X_trans = rbf_transform.fit_transform(X)
    Y_trans = rbf_transform.transform(Y)
    kernel_approx = np.dot(X_trans, Y_trans.T)

    error = kernel - kernel_approx
    assert_less_equal(np.abs(np.mean(error)), 0.01)  # close to unbiased
    np.abs(error, out=error)
    assert_less_equal(np.max(error), 0.1)  # nothing too far off
    assert_less_equal(np.mean(error), 0.05)  # mean is fairly close 
Example #21
Source Project: twitter-stock-recommendation   Author: alvarobartt   File: test_kernel_approximation.py    License: MIT License
def test_nystroem_default_parameters():
    rnd = np.random.RandomState(42)
    X = rnd.uniform(size=(10, 4))

    # rbf kernel should behave as gamma=None by default
    # aka gamma = 1 / n_features
    nystroem = Nystroem(n_components=10)
    X_transformed = nystroem.fit_transform(X)
    K = rbf_kernel(X, gamma=None)
    K2 = np.dot(X_transformed, X_transformed.T)
    assert_array_almost_equal(K, K2)

    # chi2 kernel should behave as gamma=1 by default
    nystroem = Nystroem(kernel='chi2', n_components=10)
    X_transformed = nystroem.fit_transform(X)
    K = chi2_kernel(X, gamma=1)
    K2 = np.dot(X_transformed, X_transformed.T)
    assert_array_almost_equal(K, K2) 
Example #22
Source Project: scanorama   Author: brianhie   File: scanorama.py    License: MIT License
def batch_bias(curr_ds, match_ds, bias, batch_size=None, sigma=SIGMA):
    if batch_size is None:
        weights = rbf_kernel(curr_ds, match_ds, gamma=0.5*sigma)
        weights = normalize(weights, axis=1, norm='l1')
        avg_bias = np.dot(weights, bias)
        return avg_bias

    base = 0
    avg_bias = np.zeros(curr_ds.shape)
    denom = np.zeros(curr_ds.shape[0])
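    # process match_ds in batches so each kernel matrix stays small in memory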
    while base < match_ds.shape[0]:
        batch_idx = range(
            base, min(base + batch_size, match_ds.shape[0])
        )
        weights = rbf_kernel(curr_ds, match_ds[batch_idx, :],
                             gamma=0.5*sigma)
        avg_bias += np.dot(weights, bias[batch_idx, :])
        denom += np.sum(weights, axis=1)
        base += batch_size

    denom = handle_zeros_in_scale(denom, copy=False)
    avg_bias /= denom[:, np.newaxis]

    return avg_bias

Example #23
Source Project: mars   Author: mars-project   File: test_rbf_kernel.py    License: Apache License 2.0
def testRbfKernel(self):
        rs = np.random.RandomState(0)
        raw_X = rs.rand(10, 4)
        raw_Y = rs.rand(11, 4)

        r = rbf_kernel(raw_X, raw_Y)
        result = r.to_numpy()
        expected = sklearn_rbf_kernel(raw_X, raw_Y)

        np.testing.assert_almost_equal(result, expected) 
Example #24
Source Project: Mastering-Elasticsearch-7.0   Author: PacktPublishing   File: test_pairwise.py    License: MIT License
def callable_rbf_kernel(x, y, **kwds):
    # Callable version of pairwise.rbf_kernel.
    K = rbf_kernel(np.atleast_2d(x), np.atleast_2d(y), **kwds)
    return K 
Example #25
Source Project: Mastering-Elasticsearch-7.0   Author: PacktPublishing   File: test_pairwise.py    License: MIT License
def test_pairwise_kernels_filter_param():
    rng = np.random.RandomState(0)
    X = rng.random_sample((5, 4))
    Y = rng.random_sample((2, 4))
    K = rbf_kernel(X, Y, gamma=0.1)
    params = {"gamma": 0.1, "blabla": ":)"}
    K2 = pairwise_kernels(X, Y, metric="rbf", filter_params=True, **params)
    assert_array_almost_equal(K, K2)

    assert_raises(TypeError, pairwise_kernels, X, Y, "rbf", **params) 
Example #26
Source Project: Mastering-Elasticsearch-7.0   Author: PacktPublishing   File: test_pairwise.py    License: MIT License
def test_rbf_kernel():
    rng = np.random.RandomState(0)
    X = rng.random_sample((5, 4))
    K = rbf_kernel(X, X)
    # the diagonal elements of a rbf kernel are 1
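    # K.flat[::6] steps through the flattened 5 x 5 matrix with stride n + 1, i.e. the diagonal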
    assert_array_almost_equal(K.flat[::6], np.ones(5)) 
Example #27
Source Project: Mastering-Elasticsearch-7.0   Author: PacktPublishing   File: test_spectral_embedding.py    License: MIT License
def test_spectral_embedding_precomputed_affinity(seed=36):
    # Test spectral embedding with precomputed kernel
    gamma = 1.0
    se_precomp = SpectralEmbedding(n_components=2, affinity="precomputed",
                                   random_state=np.random.RandomState(seed))
    se_rbf = SpectralEmbedding(n_components=2, affinity="rbf",
                               gamma=gamma,
                               random_state=np.random.RandomState(seed))
    embed_precomp = se_precomp.fit_transform(rbf_kernel(S, gamma=gamma))
    embed_rbf = se_rbf.fit_transform(S)
    assert_array_almost_equal(
        se_precomp.affinity_matrix_, se_rbf.affinity_matrix_)
    assert _check_with_col_sign_flipping(embed_precomp, embed_rbf, 0.05) 
Example #28
Source Project: Mastering-Elasticsearch-7.0   Author: PacktPublishing   File: test_spectral_embedding.py    License: MIT License
def test_spectral_embedding_deterministic():
    # Test that Spectral Embedding is deterministic
    random_state = np.random.RandomState(36)
    data = random_state.randn(10, 30)
    sims = rbf_kernel(data)
    embedding_1 = spectral_embedding(sims)
    embedding_2 = spectral_embedding(sims)
    assert_array_almost_equal(embedding_1, embedding_2) 
Example #29
Source Project: Mastering-Elasticsearch-7.0   Author: PacktPublishing   File: test_svm.py    License: MIT License
def test_decision_function():
    # Test decision_function
    # Sanity check, test that decision_function implemented in python
    # returns the same as the one in libsvm
    # multi class:
    clf = svm.SVC(kernel='linear', C=0.1,
                  decision_function_shape='ovo').fit(iris.data, iris.target)

    dec = np.dot(iris.data, clf.coef_.T) + clf.intercept_

    assert_array_almost_equal(dec, clf.decision_function(iris.data))

    # binary:
    clf.fit(X, Y)
    dec = np.dot(X, clf.coef_.T) + clf.intercept_
    prediction = clf.predict(X)
    assert_array_almost_equal(dec.ravel(), clf.decision_function(X))
    assert_array_almost_equal(
        prediction,
        clf.classes_[(clf.decision_function(X) > 0).astype(int)])
    expected = np.array([-1., -0.66, -1., 0.66, 1., 1.])
    assert_array_almost_equal(clf.decision_function(X), expected, 2)

    # kernel binary:
    clf = svm.SVC(kernel='rbf', gamma=1, decision_function_shape='ovo')
    clf.fit(X, Y)

    rbfs = rbf_kernel(X, clf.support_vectors_, gamma=clf.gamma)
    dec = np.dot(rbfs, clf.dual_coef_.T) + clf.intercept_
    assert_array_almost_equal(dec.ravel(), clf.decision_function(X)) 
Example #30
Source Project: Mastering-Elasticsearch-7.0   Author: PacktPublishing   File: test_kernel_approximation.py    License: MIT License
def test_nystroem_approximation():
    # some basic tests
    rnd = np.random.RandomState(0)
    X = rnd.uniform(size=(10, 4))

    # With n_components = n_samples this is exact
    X_transformed = Nystroem(n_components=X.shape[0]).fit_transform(X)
    K = rbf_kernel(X)
    assert_array_almost_equal(np.dot(X_transformed, X_transformed.T), K)

    trans = Nystroem(n_components=2, random_state=rnd)
    X_transformed = trans.fit(X).transform(X)
    assert_equal(X_transformed.shape, (X.shape[0], 2))

    # test callable kernel
    def linear_kernel(X, Y):
        return np.dot(X, Y.T)
    trans = Nystroem(n_components=2, kernel=linear_kernel, random_state=rnd)
    X_transformed = trans.fit(X).transform(X)
    assert_equal(X_transformed.shape, (X.shape[0], 2))

    # test that available kernels fit and transform
    kernels_available = kernel_metrics()
    for kern in kernels_available:
        trans = Nystroem(n_components=2, kernel=kern, random_state=rnd)
        X_transformed = trans.fit(X).transform(X)
        assert_equal(X_transformed.shape, (X.shape[0], 2))