Python sklearn.manifold.LocallyLinearEmbedding() Examples

The following are 20 code examples of sklearn.manifold.LocallyLinearEmbedding(). You can go to the original project or source file by following the links above each example. You may also want to check out all the available functions and classes of the module sklearn.manifold.
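Before the project examples, here is a minimal, self-contained usage sketch of the class on a toy dataset. The dataset choice and parameter values are illustrative assumptions and are not taken from any of the projects below.

# Minimal usage sketch of sklearn.manifold.LocallyLinearEmbedding.
# The dataset and parameter values are illustrative choices.
from sklearn.datasets import make_s_curve
from sklearn.manifold import LocallyLinearEmbedding

X, color = make_s_curve(n_samples=1000, random_state=0)

# Reduce the 3-D S-curve to 2 dimensions with standard LLE.
lle = LocallyLinearEmbedding(n_neighbors=12, n_components=2,
                             method='standard', random_state=0)
X_embedded = lle.fit_transform(X)

print(X_embedded.shape)           # (1000, 2)
print(lle.reconstruction_error_)  # reconstruction error of the fitted embedding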
Example #1
Source File: utils.py    From timeception with GNU General Public License v3.0
from sklearn import manifold  # import needed by this excerpt (assumed; not shown in the original file)

def learn_manifold(manifold_type, feats, n_components=2):
    if manifold_type == 'tsne':
        feats_fitted = manifold.TSNE(n_components=n_components, random_state=0).fit_transform(feats)
    elif manifold_type == 'isomap':
        feats_fitted = manifold.Isomap(n_components=n_components).fit_transform(feats)
    elif manifold_type == 'mds':
        feats_fitted = manifold.MDS(n_components=n_components).fit_transform(feats)
    elif manifold_type == 'spectral':
        feats_fitted = manifold.SpectralEmbedding(n_components=n_components).fit_transform(feats)
    else:
        raise Exception('wrong manifold type!')

    # methods = ['standard', 'ltsa', 'hessian', 'modified']
    # feats_fitted = manifold.LocallyLinearEmbedding(n_components=n_components, method=methods[0]).fit_transform(pred)

    return feats_fitted 
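The commented-out lines above hint at an LLE option that is never wired into the dispatch. A possible variant with an explicit 'lle' branch is sketched below; the branch name and the choice of method='standard' are assumptions, not part of the original utils.py.

# Hypothetical variant of learn_manifold with an explicit 'lle' branch
# (assumed extension, not the project's code).
from sklearn import manifold

def learn_manifold_with_lle(manifold_type, feats, n_components=2):
    if manifold_type == 'tsne':
        reducer = manifold.TSNE(n_components=n_components, random_state=0)
    elif manifold_type == 'isomap':
        reducer = manifold.Isomap(n_components=n_components)
    elif manifold_type == 'mds':
        reducer = manifold.MDS(n_components=n_components)
    elif manifold_type == 'spectral':
        reducer = manifold.SpectralEmbedding(n_components=n_components)
    elif manifold_type == 'lle':
        # available methods: 'standard', 'ltsa', 'hessian', 'modified'
        reducer = manifold.LocallyLinearEmbedding(n_components=n_components,
                                                  method='standard')
    else:
        raise Exception('wrong manifold type!')
    return reducer.fit_transform(feats)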
Example #2
Source File: utils.py    From deep-smoke-machine with BSD 3-Clause "New" or "Revised" License
from sklearn import manifold  # import needed by this excerpt (assumed; not shown in the original file)

def learn_manifold(manifold_type, feats, n_components=2):
    if manifold_type == 'tsne':
        feats_fitted = manifold.TSNE(n_components=n_components, random_state=0).fit_transform(feats)
    elif manifold_type == 'isomap':
        feats_fitted = manifold.Isomap(n_components=n_components).fit_transform(feats)
    elif manifold_type == 'mds':
        feats_fitted = manifold.MDS(n_components=n_components).fit_transform(feats)
    elif manifold_type == 'spectral':
        feats_fitted = manifold.SpectralEmbedding(n_components=n_components).fit_transform(feats)
    else:
        raise Exception('wrong manifold type!')

    # methods = ['standard', 'ltsa', 'hessian', 'modified']
    # feats_fitted = manifold.LocallyLinearEmbedding(n_components=n_components, method=methods[0]).fit_transform(pred)

    return feats_fitted 
Example #3
Source File: test_lle.py    From megaman with BSD 2-Clause "Simplified" License
def test_lle_manifold():
    rng = np.random.RandomState(0)
    # similar test on a slightly more complex manifold
    X = np.array(list(product(np.arange(18), repeat=2)))
    X = np.c_[X, X[:, 0] ** 2 / 18]
    X = X + 1e-10 * rng.uniform(size=X.shape)
    n_components = 2
    G = geom.Geometry(adjacency_kwds = {'radius':3})
    G.set_data_matrix(X)
    distance_matrix = G.compute_adjacency_matrix()
    tol = 1.5
    N = lle.barycenter_graph(distance_matrix, X).todense()
    reconstruction_error = np.linalg.norm(np.dot(N, X) - X)
    assert(reconstruction_error < tol)
    for eigen_solver in EIGEN_SOLVERS:
        clf = lle.LocallyLinearEmbedding(n_components = n_components, geom = G,
                                eigen_solver = eigen_solver, random_state = rng)
        clf.fit(X)
        assert(clf.embedding_.shape[1] == n_components)
        reconstruction_error = np.linalg.norm(
            np.dot(N, clf.embedding_) - clf.embedding_, 'fro') ** 2
        assert(reconstruction_error < tol) 
Example #4
Source File: utils.py    From videograph with GNU General Public License v3.0
from sklearn import manifold  # import needed by this excerpt (assumed; not shown in the original file)

def learn_manifold(manifold_type, feats, n_components=2):
    if manifold_type == 'tsne':
        feats_fitted = manifold.TSNE(n_components=n_components, random_state=0).fit_transform(feats)
    elif manifold_type == 'isomap':
        feats_fitted = manifold.Isomap(n_components=n_components).fit_transform(feats)
    elif manifold_type == 'mds':
        feats_fitted = manifold.MDS(n_components=n_components).fit_transform(feats)
    elif manifold_type == 'spectral':
        feats_fitted = manifold.SpectralEmbedding(n_components=n_components).fit_transform(feats)
    else:
        raise Exception('wrong manifold type!')

    # methods = ['standard', 'ltsa', 'hessian', 'modified']
    # feats_fitted = manifold.LocallyLinearEmbedding(n_components=n_components, method=methods[0]).fit_transform(pred)

    return feats_fitted 
Example #5
Source File: test_lle.py    From megaman with BSD 2-Clause "Simplified" License
def test_lle_simple_grid():
    # note: ARPACK is numerically unstable, so this test will fail for
    #       some random seeds.  We choose 20 because the tests pass.
    rng = np.random.RandomState(20)
    tol = 0.1
    # grid of equidistant points in 2D, n_components = n_dim
    X = np.array(list(product(range(5), repeat=2)))
    X = X + 1e-10 * rng.uniform(size=X.shape)
    n_components = 2
    G = geom.Geometry(adjacency_kwds = {'radius':3})
    G.set_data_matrix(X)
    tol = 0.1
    distance_matrix = G.compute_adjacency_matrix()
    N = lle.barycenter_graph(distance_matrix, X).todense()
    reconstruction_error = np.linalg.norm(np.dot(N, X) - X, 'fro')
    assert(reconstruction_error < tol)
    for eigen_solver in EIGEN_SOLVERS:
        clf = lle.LocallyLinearEmbedding(n_components = n_components, geom = G,
                                eigen_solver = eigen_solver, random_state = rng)
        clf.fit(X)
        assert(clf.embedding_.shape[1] == n_components)
        reconstruction_error = np.linalg.norm(
            np.dot(N, clf.embedding_) - clf.embedding_, 'fro') ** 2
        assert(reconstruction_error < tol) 
Example #6
Source File: test_lle.py    From megaman with BSD 2-Clause "Simplified" License
def test_lle_with_sklearn():
    N = 10
    X, color = datasets.samples_generator.make_s_curve(N, random_state=0)
    n_components = 2
    n_neighbors = 3
    knn = NearestNeighbors(n_neighbors + 1).fit(X)
    G = geom.Geometry()
    G.set_data_matrix(X)
    G.set_adjacency_matrix(knn.kneighbors_graph(X, mode = 'distance'))
    sk_Y_lle = manifold.LocallyLinearEmbedding(n_neighbors, n_components, method = 'standard').fit_transform(X)
    (mm_Y_lle, err) = lle.locally_linear_embedding(G, n_components)
    assert(_check_with_col_sign_flipping(sk_Y_lle, mm_Y_lle, 0.05)) 
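The helper _check_with_col_sign_flipping is defined elsewhere in megaman's test module and is not shown here. Because eigenvector-based embeddings are only determined up to a per-column sign flip, the comparison has to tolerate flipped columns; a minimal sketch of such a check (an assumed implementation, not megaman's exact code) is:

import numpy as np

def check_with_col_sign_flipping(A, B, tol=0.0):
    # Return True if A and B agree column-wise up to a sign flip,
    # within a mean-squared tolerance of tol ** 2.
    for j in range(A.shape[1]):
        same = ((A[:, j] - B[:, j]) ** 2).mean() <= tol ** 2
        flipped = ((A[:, j] + B[:, j]) ** 2).mean() <= tol ** 2
        if not (same or flipped):
            return False
    return True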
Example #7
Source File: test_locally_linear.py    From twitter-stock-recommendation with MIT License
def test_integer_input():
    rand = np.random.RandomState(0)
    X = rand.randint(0, 100, size=(20, 3))

    for method in ["standard", "hessian", "modified", "ltsa"]:
        clf = manifold.LocallyLinearEmbedding(method=method, n_neighbors=10)
        clf.fit(X)  # this previously raised a TypeError 
Example #8
Source File: test_locally_linear.py    From twitter-stock-recommendation with MIT License
def test_pipeline():
    # check that LocallyLinearEmbedding works fine as a Pipeline
    # only checks that no error is raised.
    # TODO check that it actually does something useful
    from sklearn import pipeline, datasets
    X, y = datasets.make_blobs(random_state=0)
    clf = pipeline.Pipeline(
        [('filter', manifold.LocallyLinearEmbedding(random_state=0)),
         ('clf', neighbors.KNeighborsClassifier())])
    clf.fit(X, y)
    assert_less(.9, clf.score(X, y))


# Test the error raised when the weight matrix is singular 
Example #9
Source File: test_locally_linear.py    From twitter-stock-recommendation with MIT License
def test_lle_init_parameters():
    X = np.random.rand(5, 3)

    clf = manifold.LocallyLinearEmbedding(eigen_solver="error")
    msg = "unrecognized eigen_solver 'error'"
    assert_raise_message(ValueError, msg, clf.fit, X)

    clf = manifold.LocallyLinearEmbedding(method="error")
    msg = "unrecognized method 'error'"
    assert_raise_message(ValueError, msg, clf.fit, X) 
Example #10
Source File: test_locally_linear.py    From twitter-stock-recommendation with MIT License
def test_lle_manifold():
    rng = np.random.RandomState(0)
    # similar test on a slightly more complex manifold
    X = np.array(list(product(np.arange(18), repeat=2)))
    X = np.c_[X, X[:, 0] ** 2 / 18]
    X = X + 1e-10 * rng.uniform(size=X.shape)
    n_components = 2
    for method in ["standard", "hessian", "modified", "ltsa"]:
        clf = manifold.LocallyLinearEmbedding(n_neighbors=6,
                                              n_components=n_components,
                                              method=method, random_state=0)
        tol = 1.5 if method == "standard" else 3

        N = barycenter_kneighbors_graph(X, clf.n_neighbors).toarray()
        reconstruction_error = linalg.norm(np.dot(N, X) - X)
        assert_less(reconstruction_error, tol)

        for solver in eigen_solvers:
            clf.set_params(eigen_solver=solver)
            clf.fit(X)
            assert_true(clf.embedding_.shape[1] == n_components)
            reconstruction_error = linalg.norm(
                np.dot(N, clf.embedding_) - clf.embedding_, 'fro') ** 2
            details = ("solver: %s, method: %s" % (solver, method))
            assert_less(reconstruction_error, tol, msg=details)
            assert_less(np.abs(clf.reconstruction_error_ -
                               reconstruction_error),
                        tol * reconstruction_error, msg=details)


# Test the error raised when parameter passed to lle is invalid 
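The weight matrix N in the test above comes from barycenter_kneighbors_graph, which solves for weights that reconstruct each sample as a barycentric combination of its nearest neighbors, so that N @ X stays close to X. A standalone reproduction of the first check is sketched below; the private import path is version-dependent (older releases expose it as sklearn.manifold.locally_linear) and is an assumption.

from itertools import product

import numpy as np
from numpy import linalg
# Private helper; import path assumed for recent scikit-learn versions.
from sklearn.manifold._locally_linear import barycenter_kneighbors_graph

rng = np.random.RandomState(0)
X = np.array(list(product(np.arange(18), repeat=2)))
X = np.c_[X, X[:, 0] ** 2 / 18]
X = X + 1e-10 * rng.uniform(size=X.shape)

N = barycenter_kneighbors_graph(X, n_neighbors=6).toarray()
# Each row of N holds barycentric weights, so N @ X should be close to X.
print(linalg.norm(np.dot(N, X) - X))  # expected well below the test's tol of 1.5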
Example #11
Source File: test_locally_linear.py    From twitter-stock-recommendation with MIT License
def test_lle_simple_grid():
    # note: ARPACK is numerically unstable, so this test will fail for
    #       some random seeds.  We choose 2 because the tests pass.
    rng = np.random.RandomState(2)

    # grid of equidistant points in 2D, n_components = n_dim
    X = np.array(list(product(range(5), repeat=2)))
    X = X + 1e-10 * rng.uniform(size=X.shape)
    n_components = 2
    clf = manifold.LocallyLinearEmbedding(n_neighbors=5,
                                          n_components=n_components,
                                          random_state=rng)
    tol = 0.1

    N = barycenter_kneighbors_graph(X, clf.n_neighbors).toarray()
    reconstruction_error = linalg.norm(np.dot(N, X) - X, 'fro')
    assert_less(reconstruction_error, tol)

    for solver in eigen_solvers:
        clf.set_params(eigen_solver=solver)
        clf.fit(X)
        assert_true(clf.embedding_.shape[1] == n_components)
        reconstruction_error = linalg.norm(
            np.dot(N, clf.embedding_) - clf.embedding_, 'fro') ** 2

        assert_less(reconstruction_error, tol)
        assert_almost_equal(clf.reconstruction_error_,
                            reconstruction_error, decimal=1)

    # re-embed a noisy version of X using the transform method
    noise = rng.randn(*X.shape) / 100
    X_reembedded = clf.transform(X + noise)
    assert_less(linalg.norm(X_reembedded - clf.embedding_), tol) 
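The last lines of the test embed a perturbed copy of X through the already-fitted model, which is the out-of-sample path of LocallyLinearEmbedding. A small standalone sketch of that usage (toy data and parameters chosen for illustration):

# Out-of-sample embedding via LocallyLinearEmbedding.transform
# (toy data and parameters are illustrative).
import numpy as np
from sklearn.manifold import LocallyLinearEmbedding

rng = np.random.RandomState(0)
X_train = rng.rand(100, 5)
X_new = X_train + rng.randn(100, 5) / 100   # slightly perturbed copies

lle = LocallyLinearEmbedding(n_neighbors=10, n_components=2, random_state=0)
lle.fit(X_train)
Y_new = lle.transform(X_new)   # embeds new points using the fitted model
print(Y_new.shape)             # (100, 2)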
Example #12
Source File: test_manifold.py    From pandas-ml with BSD 3-Clause "New" or "Revised" License
def test_objectmapper(self):
        df = pdml.ModelFrame([])
        self.assertIs(df.manifold.LocallyLinearEmbedding,
                      manifold.LocallyLinearEmbedding)
        self.assertIs(df.manifold.Isomap, manifold.Isomap)
        self.assertIs(df.manifold.MDS, manifold.MDS)
        self.assertIs(df.manifold.SpectralEmbedding, manifold.SpectralEmbedding)
        self.assertIs(df.manifold.TSNE, manifold.TSNE) 
Example #13
Source File: test_ltsa.py    From megaman with BSD 2-Clause "Simplified" License
def test_ltsa_with_sklearn():
    N = 10
    X, color = datasets.samples_generator.make_s_curve(N, random_state=0)
    n_components = 2
    n_neighbors = 3
    knn = NearestNeighbors(n_neighbors + 1).fit(X)
    G = geom.Geometry()
    G.set_data_matrix(X)
    G.set_adjacency_matrix(knn.kneighbors_graph(X, mode = 'distance'))
    sk_Y_ltsa = manifold.LocallyLinearEmbedding(n_neighbors, n_components,
                                                method = 'ltsa',
                                                eigen_solver = 'arpack').fit_transform(X)
    (mm_Y_ltsa, err) = ltsa.ltsa(G, n_components, eigen_solver = 'arpack')
    assert(_check_with_col_sign_flipping(sk_Y_ltsa, mm_Y_ltsa, 0.05)) 
Example #14
Source File: test_locally_linear.py    From Mastering-Elasticsearch-7.0 with MIT License
def test_integer_input():
    rand = np.random.RandomState(0)
    X = rand.randint(0, 100, size=(20, 3))

    for method in ["standard", "hessian", "modified", "ltsa"]:
        clf = manifold.LocallyLinearEmbedding(method=method, n_neighbors=10)
        clf.fit(X)  # this previously raised a TypeError 
Example #15
Source File: test_locally_linear.py    From Mastering-Elasticsearch-7.0 with MIT License
def test_pipeline():
    # check that LocallyLinearEmbedding works fine as a Pipeline
    # only checks that no error is raised.
    # TODO check that it actually does something useful
    from sklearn import pipeline, datasets
    X, y = datasets.make_blobs(random_state=0)
    clf = pipeline.Pipeline(
        [('filter', manifold.LocallyLinearEmbedding(random_state=0)),
         ('clf', neighbors.KNeighborsClassifier())])
    clf.fit(X, y)
    assert_less(.9, clf.score(X, y))


# Test the error raised when the weight matrix is singular 
Example #16
Source File: test_locally_linear.py    From Mastering-Elasticsearch-7.0 with MIT License
def test_lle_init_parameters():
    X = np.random.rand(5, 3)

    clf = manifold.LocallyLinearEmbedding(eigen_solver="error")
    msg = "unrecognized eigen_solver 'error'"
    assert_raise_message(ValueError, msg, clf.fit, X)

    clf = manifold.LocallyLinearEmbedding(method="error")
    msg = "unrecognized method 'error'"
    assert_raise_message(ValueError, msg, clf.fit, X) 
Example #17
Source File: test_locally_linear.py    From Mastering-Elasticsearch-7.0 with MIT License
def test_lle_manifold():
    rng = np.random.RandomState(0)
    # similar test on a slightly more complex manifold
    X = np.array(list(product(np.arange(18), repeat=2)))
    X = np.c_[X, X[:, 0] ** 2 / 18]
    X = X + 1e-10 * rng.uniform(size=X.shape)
    n_components = 2
    for method in ["standard", "hessian", "modified", "ltsa"]:
        clf = manifold.LocallyLinearEmbedding(n_neighbors=6,
                                              n_components=n_components,
                                              method=method, random_state=0)
        tol = 1.5 if method == "standard" else 3

        N = barycenter_kneighbors_graph(X, clf.n_neighbors).toarray()
        reconstruction_error = linalg.norm(np.dot(N, X) - X)
        assert_less(reconstruction_error, tol)

        for solver in eigen_solvers:
            clf.set_params(eigen_solver=solver)
            clf.fit(X)
            assert clf.embedding_.shape[1] == n_components
            reconstruction_error = linalg.norm(
                np.dot(N, clf.embedding_) - clf.embedding_, 'fro') ** 2
            details = ("solver: %s, method: %s" % (solver, method))
            assert_less(reconstruction_error, tol, msg=details)
            assert_less(np.abs(clf.reconstruction_error_ -
                               reconstruction_error),
                        tol * reconstruction_error, msg=details)


# Test the error raised when parameter passed to lle is invalid 
Example #18
Source File: test_locally_linear.py    From Mastering-Elasticsearch-7.0 with MIT License
def test_lle_simple_grid():
    # note: ARPACK is numerically unstable, so this test will fail for
    #       some random seeds.  We choose 2 because the tests pass.
    rng = np.random.RandomState(2)

    # grid of equidistant points in 2D, n_components = n_dim
    X = np.array(list(product(range(5), repeat=2)))
    X = X + 1e-10 * rng.uniform(size=X.shape)
    n_components = 2
    clf = manifold.LocallyLinearEmbedding(n_neighbors=5,
                                          n_components=n_components,
                                          random_state=rng)
    tol = 0.1

    N = barycenter_kneighbors_graph(X, clf.n_neighbors).toarray()
    reconstruction_error = linalg.norm(np.dot(N, X) - X, 'fro')
    assert_less(reconstruction_error, tol)

    for solver in eigen_solvers:
        clf.set_params(eigen_solver=solver)
        clf.fit(X)
        assert clf.embedding_.shape[1] == n_components
        reconstruction_error = linalg.norm(
            np.dot(N, clf.embedding_) - clf.embedding_, 'fro') ** 2

        assert_less(reconstruction_error, tol)
        assert_almost_equal(clf.reconstruction_error_,
                            reconstruction_error, decimal=1)

    # re-embed a noisy version of X using the transform method
    noise = rng.randn(*X.shape) / 100
    X_reembedded = clf.transform(X + noise)
    assert_less(linalg.norm(X_reembedded - clf.embedding_), tol) 
Example #19
Source File: visualization.py    From TensorFlow_DCIGN with MIT License
def visualize_encodings(encodings, file_name=None,
                        grid=None, skip_every=999, fast=False, fig=None, interactive=False):
  encodings = manual_pca(encodings)
  if encodings.shape[1] <= 3:
    return print_data_only(encodings, file_name, fig=fig, interactive=interactive)

  encodings = encodings[0:720]
  hessian_euc = dist.squareform(dist.pdist(encodings[0:720], 'euclidean'))
  hessian_cos = dist.squareform(dist.pdist(encodings[0:720], 'cosine'))
  grid = (3, 4) if grid is None else grid
  project_ops = []

  n = 2
  project_ops.append(("LLE ltsa       N:%d" % n, mn.LocallyLinearEmbedding(10, n, method='ltsa')))
  project_ops.append(("LLE modified   N:%d" % n, mn.LocallyLinearEmbedding(10, n, method='modified')))
  project_ops.append(('MDS euclidean  N:%d' % n, mn.MDS(n, max_iter=300, n_init=1, dissimilarity='precomputed')))
  project_ops.append(("TSNE 30/2000   N:%d" % n, TSNE(perplexity=30, n_components=n, init='pca', n_iter=2000)))
  n = 3
  project_ops.append(("LLE ltsa       N:%d" % n, mn.LocallyLinearEmbedding(10, n, method='ltsa')))
  project_ops.append(("LLE modified   N:%d" % n, mn.LocallyLinearEmbedding(10, n, method='modified')))
  project_ops.append(('MDS euclidean  N:%d' % n, mn.MDS(n, max_iter=300, n_init=1, dissimilarity='precomputed')))
  project_ops.append(('MDS cosine     N:%d' % n, mn.MDS(n, max_iter=300, n_init=1, dissimilarity='precomputed')))

  plot_places = []
  for i in range(12):
    u, v = int(i / (skip_every - 1)), i % (skip_every - 1)
    j = v + u * skip_every + 1
    plot_places.append(j)

  fig = get_figure(fig)
  fig.set_size_inches(fig.get_size_inches()[0] * grid[0] / 1.,
                      fig.get_size_inches()[1] * grid[1] / 2.0)

  for i, (name, manifold) in enumerate(project_ops):
    is3d = 'N:3' in name

    try:
      if is3d:
        subplot = plt.subplot(grid[0], grid[1], plot_places[i], projection='3d')
      else:
        subplot = plt.subplot(grid[0], grid[1], plot_places[i])

      data_source = encodings if not _needs_hessian(manifold) else \
        (hessian_cos if 'cosine' in name else hessian_euc)
      projections = manifold.fit_transform(data_source)
      scatter(subplot, projections, is3d, _build_radial_colors(len(data_source)))
      subplot.set_title(name)
    except:
      print(name, "Unexpected error: ", sys.exc_info()[0], sys.exc_info()[1] if len(sys.exc_info()) > 1 else '')

  visualize_data_same(encodings, grid=grid, places=plot_places[-4:])
  if not interactive:
    save_fig(file_name, fig)
  ut.print_time('visualization finished') 
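Note that the snippet above passes n_neighbors and n_components positionally, e.g. mn.LocallyLinearEmbedding(10, n, method='ltsa'). In recent scikit-learn releases (1.0 and later) estimator constructor parameters are keyword-only, so the equivalent calls would be written as below (mn is assumed to be sklearn.manifold, as in the snippet):

# Keyword-argument form of the constructors used above.
from sklearn import manifold as mn

lle_ltsa = mn.LocallyLinearEmbedding(n_neighbors=10, n_components=2, method='ltsa')
lle_modified = mn.LocallyLinearEmbedding(n_neighbors=10, n_components=2, method='modified')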
Example #20
Source File: testing_and_visualisation.py    From ImageSetCleaner with GNU General Public License v3.0
def see_iso_map(bottlenecks, labels, suptitle=None):
    """

    :param bottlenecks:
    :param labels:
    :param suptitle: String to add as plot suptitles
    :return: Nothing, will just plot a scatter plot to show the distribution of our data after dimensionality reduction.
    """

    n_samples, n_features = bottlenecks.shape
    n_neighbors = 25
    n_components = 2
    start_index_outlier = np.where(labels == 1)[0][0]
    alpha_inlier = 0.25

    B_iso = manifold.Isomap(n_neighbors, n_components).fit_transform(bottlenecks)
    B_pca = decomposition.TruncatedSVD(n_components=2).fit_transform(bottlenecks)
    B_lle = manifold.LocallyLinearEmbedding(n_neighbors, n_components, method='standard').fit_transform(bottlenecks)
    B_spec = manifold.SpectralEmbedding(n_components=n_components, random_state=42,
                                        eigen_solver='arpack').fit_transform(bottlenecks)

    plt.figure()

    plt.subplot(221)
    plt.scatter(B_iso[:start_index_outlier, 0], B_iso[:start_index_outlier, 1], marker='o', c='b', alpha=alpha_inlier)
    plt.scatter(B_iso[start_index_outlier:, 0], B_iso[start_index_outlier:, 1], marker='^', c='k')
    plt.title("Isomap projection")

    plt.subplot(222)
    inlier_scatter = plt.scatter(B_lle[:start_index_outlier, 0], B_lle[:start_index_outlier, 1], marker='o', c='b',
                                 alpha=alpha_inlier)
    outlier_scatter = plt.scatter(B_lle[start_index_outlier:, 0], B_lle[start_index_outlier:, 1], marker='^', c='k')
    plt.legend([inlier_scatter, outlier_scatter], ['Inliers', 'Outliers'], loc='lower left')
    plt.title("Locally Linear Embedding")

    plt.subplot(223)
    plt.scatter(B_pca[:start_index_outlier, 0], B_pca[:start_index_outlier, 1], marker='o', c='b', alpha=alpha_inlier)
    plt.scatter(B_pca[start_index_outlier:, 0], B_pca[start_index_outlier:, 1], marker='^', c='k')
    plt.title("Principal Components projection")

    plt.subplot(224)
    plt.scatter(B_spec[:start_index_outlier, 0], B_spec[:start_index_outlier, 1], marker='o', c='b', alpha=alpha_inlier)
    plt.scatter(B_spec[start_index_outlier:, 0], B_spec[start_index_outlier:, 1], marker='^', c='k')
    plt.title("Spectral embedding")

    if suptitle:
        plt.suptitle(suptitle)