Python sklearn.cross_validation.LeaveOneOut() Examples

The following code examples show how to use sklearn.cross_validation.LeaveOneOut(). They are drawn from open-source Python projects. Note that the sklearn.cross_validation module was deprecated in scikit-learn 0.18 and removed in 0.20; modern code should use sklearn.model_selection.LeaveOneOut instead. You can vote up the examples you like or vote down the ones you don't.

Example 1
Project: chicago-crime   Author: thekingofkings   File: NBRegression.py    MIT License 7 votes vote down vote up
def permutation_Test_LR(Y, f):
    """Leave-one-out evaluation of a linear regression of Y on features f.

    Returns (mae, mre): the mean absolute error over all held-out samples
    and the mean relative error (MAE divided by the mean of Y).
    """
    Y = Y.reshape((len(Y),))
    abs_errors = []
    for train_idx, test_idx in cross_validation.LeaveOneOut(len(Y)):
        model = linearRegression(f[train_idx], Y[train_idx])
        prediction = model.predict(f[test_idx])
        abs_errors.append(np.abs(Y[test_idx] - prediction))
    mae = np.mean(abs_errors)
    return mae, mae / Y.mean()
Example 2
Project: chicago-crime   Author: thekingofkings   File: multi_view_prediction.py    MIT License 6 votes vote down vote up
def leaveOneOut_eval(X, Y):
    X = sm.add_constant(X, prepend=False)
    loo = LeaveOneOut(len(Y))
    er = []
    for train_idx, test_idx in loo:
        nbm, yp = NBmodel(train_idx, Y, X)
        ybar = nbm.predict(X[test_idx])
        y_error = np.abs(ybar - Y[test_idx])
        if y_error > 20 * Y[test_idx]:
            print test_idx, y_error, Y[test_idx]
            continue
        er.append(y_error)
    max_idx = np.argmax(er)
    print "largest error", er[max_idx], Y[max_idx], max_idx+1
    mae = np.mean(er)
    mre = mae / np.mean(Y)
    return mre, mae 
Example 3
Project: linear_neuron   Author: uglyboxer   File: test_cross_validation.py    MIT License 6 votes vote down vote up
def test_cross_val_generator_with_default_indices():
    # Every CV generator should yield integer index arrays (dtype kind != 'b',
    # i.e. not boolean masks) for BOTH splits, usable to index numpy data.
    X = np.array([[1, 2], [3, 4], [5, 6], [7, 8]])
    y = np.array([1, 1, 2, 2])
    labels = np.array([1, 2, 3, 4])
    loo = cval.LeaveOneOut(4)
    lpo = cval.LeavePOut(4, 2)
    kf = cval.KFold(4, 2)
    skf = cval.StratifiedKFold(y, 2)
    lolo = cval.LeaveOneLabelOut(labels)
    lopo = cval.LeavePLabelOut(labels, 2)
    b = cval.Bootstrap(2)  # only in index mode
    ss = cval.ShuffleSplit(2)
    ps = cval.PredefinedSplit([1, 1, 2, 2])
    for cv in [loo, lpo, kf, skf, lolo, lopo, b, ss, ps]:
        for train, test in cv:
            assert_not_equal(np.asarray(train).dtype.kind, 'b')
            # Bug fix: the second assertion duplicated the `train` check;
            # it must verify the `test` split as well.
            assert_not_equal(np.asarray(test).dtype.kind, 'b')
            X[train], X[test]
            y[train], y[test]
Example 4
Project: linear_neuron   Author: uglyboxer   File: test_cross_validation.py    MIT License 6 votes vote down vote up
def test_cross_indices_exception():
    # check_cv must raise ValueError for mask-based (indices=False) CV
    # generators when the input data is a sparse matrix.
    X = coo_matrix(np.array([[1, 2], [3, 4], [5, 6], [7, 8]]))
    y = np.array([1, 1, 2, 2])
    labels = np.array([1, 2, 3, 4])
    mask_cvs = [
        cval.LeaveOneOut(4, indices=False),
        cval.LeavePOut(4, 2, indices=False),
        cval.KFold(4, 2, indices=False),
        cval.StratifiedKFold(y, 2, indices=False),
        cval.LeaveOneLabelOut(labels, indices=False),
        cval.LeavePLabelOut(labels, 2, indices=False),
    ]
    for cv in mask_cvs:
        assert_raises(ValueError, cval.check_cv, cv, X, y)
Example 5
Project: Weiss   Author: WangWenjun559   File: test_cross_validation.py    Apache License 2.0 6 votes vote down vote up
def test_cross_val_generator_with_indices():
    # Every CV generator should yield integer index arrays (not boolean
    # masks) for BOTH the train and test splits.
    X = np.array([[1, 2], [3, 4], [5, 6], [7, 8]])
    y = np.array([1, 1, 2, 2])
    labels = np.array([1, 2, 3, 4])
    # explicitly passing indices value is deprecated
    loo = cval.LeaveOneOut(4)
    lpo = cval.LeavePOut(4, 2)
    kf = cval.KFold(4, 2)
    skf = cval.StratifiedKFold(y, 2)
    lolo = cval.LeaveOneLabelOut(labels)
    lopo = cval.LeavePLabelOut(labels, 2)
    ps = cval.PredefinedSplit([1, 1, 2, 2])
    ss = cval.ShuffleSplit(2)
    for cv in [loo, lpo, kf, skf, lolo, lopo, ss, ps]:
        for train, test in cv:
            assert_not_equal(np.asarray(train).dtype.kind, 'b')
            # Bug fix: the second assertion duplicated the `train` check;
            # it must verify the `test` split as well.
            assert_not_equal(np.asarray(test).dtype.kind, 'b')
            X[train], X[test]
            y[train], y[test]
Example 6
Project: Weiss   Author: WangWenjun559   File: test_cross_validation.py    Apache License 2.0 6 votes vote down vote up
def test_cross_val_generator_with_default_indices():
    # Every CV generator should yield integer index arrays (not boolean
    # masks) for BOTH the train and test splits.
    X = np.array([[1, 2], [3, 4], [5, 6], [7, 8]])
    y = np.array([1, 1, 2, 2])
    labels = np.array([1, 2, 3, 4])
    loo = cval.LeaveOneOut(4)
    lpo = cval.LeavePOut(4, 2)
    kf = cval.KFold(4, 2)
    skf = cval.StratifiedKFold(y, 2)
    lolo = cval.LeaveOneLabelOut(labels)
    lopo = cval.LeavePLabelOut(labels, 2)
    ss = cval.ShuffleSplit(2)
    ps = cval.PredefinedSplit([1, 1, 2, 2])
    for cv in [loo, lpo, kf, skf, lolo, lopo, ss, ps]:
        for train, test in cv:
            assert_not_equal(np.asarray(train).dtype.kind, 'b')
            # Bug fix: the second assertion duplicated the `train` check;
            # it must verify the `test` split as well.
            assert_not_equal(np.asarray(test).dtype.kind, 'b')
            X[train], X[test]
            y[train], y[test]
Example 7
Project: wine-ml-on-aws-lambda   Author: pierreant   File: test_cross_validation.py    Apache License 2.0 6 votes vote down vote up
def test_cross_val_generator_with_indices():
    # Every CV generator should yield integer index arrays (not boolean
    # masks) for BOTH the train and test splits.
    X = np.array([[1, 2], [3, 4], [5, 6], [7, 8]])
    y = np.array([1, 1, 2, 2])
    labels = np.array([1, 2, 3, 4])
    # explicitly passing indices value is deprecated
    loo = cval.LeaveOneOut(4)
    lpo = cval.LeavePOut(4, 2)
    kf = cval.KFold(4, 2)
    skf = cval.StratifiedKFold(y, 2)
    lolo = cval.LeaveOneLabelOut(labels)
    lopo = cval.LeavePLabelOut(labels, 2)
    ps = cval.PredefinedSplit([1, 1, 2, 2])
    ss = cval.ShuffleSplit(2)
    for cv in [loo, lpo, kf, skf, lolo, lopo, ss, ps]:
        for train, test in cv:
            assert_not_equal(np.asarray(train).dtype.kind, 'b')
            # Bug fix: the second assertion duplicated the `train` check;
            # it must verify the `test` split as well.
            assert_not_equal(np.asarray(test).dtype.kind, 'b')
            X[train], X[test]
            y[train], y[test]
Example 8
Project: wine-ml-on-aws-lambda   Author: pierreant   File: test_cross_validation.py    Apache License 2.0 6 votes vote down vote up
def test_cross_val_generator_with_default_indices():
    # Every CV generator should yield integer index arrays (not boolean
    # masks) for BOTH the train and test splits.
    X = np.array([[1, 2], [3, 4], [5, 6], [7, 8]])
    y = np.array([1, 1, 2, 2])
    labels = np.array([1, 2, 3, 4])
    loo = cval.LeaveOneOut(4)
    lpo = cval.LeavePOut(4, 2)
    kf = cval.KFold(4, 2)
    skf = cval.StratifiedKFold(y, 2)
    lolo = cval.LeaveOneLabelOut(labels)
    lopo = cval.LeavePLabelOut(labels, 2)
    ss = cval.ShuffleSplit(2)
    ps = cval.PredefinedSplit([1, 1, 2, 2])
    for cv in [loo, lpo, kf, skf, lolo, lopo, ss, ps]:
        for train, test in cv:
            assert_not_equal(np.asarray(train).dtype.kind, 'b')
            # Bug fix: the second assertion duplicated the `train` check;
            # it must verify the `test` split as well.
            assert_not_equal(np.asarray(test).dtype.kind, 'b')
            X[train], X[test]
            y[train], y[test]
Example 9
Project: jamespy_py3   Author: jskDr   File: codes.py    MIT License 6 votes vote down vote up
def _cross_val_score_loo_r0( lm, X, y):
	"""
	Leave-one-out cross-validation of estimator *lm* on (X, y).

	mean_square_error metric is used from sklearn.metric.

	Return 
	--------
	The mean squared error values are returned, one per held-out sample. 
	"""

	# Promote a 1-D target to a column vector so 2-D row indexing works below.
	if len( y.shape) == 1:
		y = np.array( [y]).T

	# Leave-one-out: one fold per sample.
	kf = cross_validation.LeaveOneOut( y.shape[0])
	score_l = list()
	for tr, te in kf:
		lm.fit( X[tr,:], y[tr,:])
		yp = lm.predict( X[te, :])
		score_l.append( metrics.mean_squared_error( y[te,:], yp))

	return score_l 
Example 10
Project: jamespy_py3   Author: jskDr   File: codes.py    MIT License 6 votes vote down vote up
def cross_val_score_loo( lm, X, y):
	"""
	Leave-one-out cross-validation residuals of estimator *lm* on (X, y).

	NOTE(review): despite the historical mention of mean_square_error, this
	function returns the raw (unsquared) prediction residuals, flattened
	across all leave-one-out folds -- callers square/aggregate themselves.
	"""
	# Transformed to array if they are list, np.mat
	X = np.array( X)
	y = np.array( y)
	# Later, assert can be used to define the size of X and y

	# Promote a 1-D target to a column vector so 2-D row indexing works below.
	if len( y.shape) == 1:
		y = np.array( [y]).T

	# Leave-one-out: one fold per sample.
	kf = cross_validation.LeaveOneOut( y.shape[0])
	# flatterned error vectors for each point are stored in this vector.
	errors_l = list()
	for tr, te in kf:
		lm.fit( X[tr,:], y[tr,:])
		yp = lm.predict( X[te, :])
		errors_l.extend( (y[te,:] - yp).flatten().tolist())

	return errors_l 
Example 11
Project: jamespy_py3   Author: jskDr   File: jmimo.py    MIT License 6 votes vote down vote up
def cv_pilot_reg_only(self, alpha = 0):
		"""Leave-one-out cross-validation of pilot-based channel regression.

		alpha == 0 selects plain LinearRegression; otherwise the estimator
		named by self.model (looked up on sklearn's linear_model) is
		instantiated with this alpha.

		Returns a one-row DataFrame summarizing the run: model name, alpha,
		metric label, E[scores] (mean of squared residuals), placeholder
		std, and the raw per-sample scores.
		"""
		model = self.model
		yT_a = self.rx_p["yT_a"]
		x_a = self.rx_p["x_a"]

		# kf = KFold() 
		# loo = cross_validation.LeaveOneOut( x_a.shape[0])
		if alpha == 0:
			lm = linear_model.LinearRegression()
		else:
			lm = getattr( linear_model, model)(alpha)
		# Per-sample leave-one-out residuals from the shared helper.
		scores = codes.cross_val_score_loo( lm, yT_a, x_a)

		# Output is stored with enviromental variables.
		pdi = pd.DataFrame()
		pdi["model"] = [model]
		pdi["alpha"] = [alpha]
		pdi["metric"] = ["mean_squared_error"]
		pdi["E[scores]"] = [np.mean(np.power(scores,2))] # MSE
		pdi["std[scores]"] = ["t.b.d."]
		pdi["scores"] = [scores]

		return pdi 
Example 12
Project: BuildingMachineLearning   Author: ademyanchuk   File: chapter.py    MIT License 5 votes vote down vote up
def accuracy(features, labels):
    """Mean leave-one-out accuracy of a scaled logistic-regression pipeline."""
    from sklearn.linear_model import LogisticRegression
    from sklearn.pipeline import Pipeline
    from sklearn.preprocessing import StandardScaler
    from sklearn import cross_validation

    pipeline = Pipeline([('preproc', StandardScaler()),
                         ('classifier', LogisticRegression())])
    loo = cross_validation.LeaveOneOut(len(features))
    fold_scores = cross_validation.cross_val_score(
        pipeline, features, labels, cv=loo)
    return fold_scores.mean()
Example 13
Project: BuildingMachineLearning   Author: ademyanchuk   File: image-classification.py    MIT License 5 votes vote down vote up
def accuracy(features, labels):
    """Return mean leave-one-out classification accuracy on (features, labels)."""
    from sklearn.linear_model import LogisticRegression
    from sklearn.pipeline import Pipeline
    from sklearn.preprocessing import StandardScaler
    from sklearn import cross_validation

    # Logistic regression is used because it is very fast; feel free to
    # experiment with other classifiers.
    estimator = Pipeline([('preproc', StandardScaler()),
                          ('classifier', LogisticRegression())])
    splitter = cross_validation.LeaveOneOut(len(features))
    per_fold = cross_validation.cross_val_score(
        estimator, features, labels, cv=splitter)
    return per_fold.mean()
Example 14
Project: chicago-crime   Author: thekingofkings   File: NBRegression.py    MIT License 5 votes vote down vote up
def NB_training_python(features, crimeRates):
    """
    Use the Python package (statsmodels) to train an NB regression model.

    Leave-one-out evaluation: returns (mean abs error, std of abs errors,
    mean abs error / mean crime rate).
    """
    abs_errors = []
    for train_idx, test_idx in cross_validation.LeaveOneOut(len(crimeRates)):
        glm = sm.GLM(crimeRates[train_idx], features[train_idx],
                     family=sm.families.NegativeBinomial())
        fit_res = glm.fit()
        ybar = glm.predict(fit_res.params, features[test_idx])
        abs_errors.append(abs(ybar - crimeRates[test_idx]))
    mae = np.mean(abs_errors)
    return mae, np.std(abs_errors), mae / np.mean(crimeRates)
Example 15
Project: chicago-crime   Author: thekingofkings   File: NBRegression.py    MIT License 5 votes vote down vote up
def LR_training_python(lrf, Y, verboseoutput):    
    Y = Y.reshape((len(Y),))
    loo = cross_validation.LeaveOneOut(len(Y))
    mae2 = 0
    errors2 = []
    for train_idx, test_idx in loo:
        f_train, f_test = lrf[train_idx], lrf[test_idx]
        Y_train, Y_test = Y[train_idx], Y[test_idx]
        if not np.any(np.isnan(f_train)) and np.all(np.isfinite(f_train)):
            r2 = linearRegression(f_train, Y_train)
            y2 = r2.predict(f_test)
            errors2.append( np.abs( Y_test - y2 ) )
            if verboseoutput:
                print Y_test[0], y2[0]
        else:
            print 'nan or infinite'
            pass

    mae2 = np.mean(errors2)
    var2 = np.sqrt( np.var(errors2) )
    mre2 = mae2 / Y.mean()
    return mae2, var2, mre2


#+++++++++++++++++++++++++++++++++++++++++++++++++
# Experiment Evaluation
#+++++++++++++++++++++++++++++++++++++++++++++++++ 
Example 16
Project: chicago-crime   Author: thekingofkings   File: graph_embedding.py    MIT License 5 votes vote down vote up
def leaveOneOut_error(Y, X):
    """
    Use GLM model from python statsmodels library to fit data.
    Evaluate with leave-one-out setting, return the average of n errors.
    
    Input:    
    features    - a list features. ['all'] == ['demo', 'poi', 'geo', 'taxi']
    gwr_gamma   - the GWR weight matrx

    Output:
    error - the average error of k leave-one-out evaluation
    """
    errors = []
    errs_train = np.zeros(2)
    loo = LeaveOneOut(len(Y))
    X = sm.add_constant(X, prepend=False)
    for train_idx, test_idx in loo:
        X_train, Y_train = X[train_idx], Y[train_idx]
        # Train NegativeBinomial Model from statsmodels library
        glm = sm.GLM(Y_train, X_train, family=sm.families.NegativeBinomial())
        nbm = glm.fit()
        ybar = nbm.predict(X[train_idx])
        er_train = np.mean(np.abs(ybar - Y[train_idx]))
        errs_train += er_train, er_train / np.mean(Y[train_idx])
#        print er_train, er_train / np.mean(Y[train_idx])
        ybar = nbm.predict(X[test_idx])
        errors.append(np.abs(ybar - Y[test_idx]))
#        print ybar, Y[test_idx]
    print errs_train / len(Y)
    return np.mean(errors), np.mean(Y), np.mean(errors / Y), np.mean(errors) / np.mean(Y) 
Example 17
Project: chicago-crime   Author: thekingofkings   File: multi_view_prediction.py    MIT License 5 votes vote down vote up
def test_mvl_fuse_function(self):
        """Leave-one-out evaluation of the multi-view fused linear model.

        Trains the four per-view models (taxi, POI, geo, demo), fuses their
        predictions with a learned linear function, predicts each held-out
        sample, and prints the resulting mean relative error (MRE).
        """
        Y, D, P, T, G = generate_raw_samples()
        # Add an intercept column to each view's feature matrix.
        T = sm.add_constant(T, prepend=False)
        P = sm.add_constant(P, prepend=False)
        D = sm.add_constant(D, prepend=False)
        G = sm.add_constant(G, prepend=False)
        loo = LeaveOneOut(len(Y))
        er = []
        for train_idx, test_idx in loo:
            # One model per view, trained on the fold's training rows.
            tm = taxi_view_model(train_idx, Y, T)
            pm = poi_view_model(train_idx, Y, P)
            gm = geo_view_model(train_idx, Y, G)
            dm = demo_view_model(train_idx, Y, D)
            models = [tm, pm, gm, dm]
            # Fuse the per-view models into a single linear combiner.
            lm = mvl_fuse_function(models, train_idx, Y)
            
            
            # Per-view predictions for the held-out sample become the
            # fused model's input (with a leading 1 for the intercept).
            tm_test = tm[0].predict(T[test_idx])
            pm_test = pm[0].predict(P[test_idx])
            gm_test = gm[0].predict(G[test_idx])
            dm_test = dm[0].predict(D[test_idx])
            
            newX_test = np.array([1, tm_test, pm_test, gm_test, dm_test])
            ybar = lm.predict(newX_test)
            y_error = ybar - Y[test_idx]
#            if np.abs(y_error / Y[test_idx]) > 0.8:
#                print test_idx, ybar, Y[test_idx], newX_test
            er.append(y_error)
        mre = np.mean(np.abs(er)) / np.mean(Y)
        print "MVL with linear fusion function MRE: {0}".format(mre)
#        self.visualize_prediction_error(er, Y, "MVL linear combination") 
Example 18
Project: Building-Machine-Learning-Systems-With-Python-Second-Edition   Author: PacktPublishing   File: image-classification.py    MIT License 5 votes vote down vote up
def accuracy(features, labels):
    """Leave-one-out accuracy of logistic regression on standardized features."""
    from sklearn.linear_model import LogisticRegression
    from sklearn.pipeline import Pipeline
    from sklearn.preprocessing import StandardScaler
    from sklearn import cross_validation

    # Logistic regression is chosen for speed; other classifiers work too.
    clf = Pipeline([('preproc', StandardScaler()),
                    ('classifier', LogisticRegression())])
    cv_scheme = cross_validation.LeaveOneOut(len(features))
    scores = cross_validation.cross_val_score(clf, features, labels,
                                              cv=cv_scheme)
    return scores.mean()
Example 19
Project: linear_neuron   Author: uglyboxer   File: test_cross_validation.py    MIT License 5 votes vote down vote up
def test_cross_val_generator_with_mask():
    # With indices=False (deprecated), every CV generator should yield
    # boolean masks (dtype kind 'b') for BOTH the train and test splits.
    X = np.array([[1, 2], [3, 4], [5, 6], [7, 8]])
    y = np.array([1, 1, 2, 2])
    labels = np.array([1, 2, 3, 4])
    loo = assert_warns(DeprecationWarning, cval.LeaveOneOut,
                       4, indices=False)
    lpo = assert_warns(DeprecationWarning, cval.LeavePOut,
                       4, 2, indices=False)
    kf = assert_warns(DeprecationWarning, cval.KFold,
                      4, 2, indices=False)
    skf = assert_warns(DeprecationWarning, cval.StratifiedKFold,
                       y, 2, indices=False)
    lolo = assert_warns(DeprecationWarning, cval.LeaveOneLabelOut,
                        labels, indices=False)
    lopo = assert_warns(DeprecationWarning, cval.LeavePLabelOut,
                        labels, 2, indices=False)
    ss = assert_warns(DeprecationWarning, cval.ShuffleSplit,
                      4, indices=False)
    ps = assert_warns(DeprecationWarning, cval.PredefinedSplit, [1, 1, 2, 2],
                      indices=False)
    for cv in [loo, lpo, kf, skf, lolo, lopo, ss, ps]:
        for train, test in cv:
            assert_equal(np.asarray(train).dtype.kind, 'b')
            # Bug fix: the second assertion duplicated the `train` check;
            # it must verify the `test` split as well.
            assert_equal(np.asarray(test).dtype.kind, 'b')
            X[train], X[test]
            y[train], y[test]
Example 20
Project: linear_neuron   Author: uglyboxer   File: test_cross_validation.py    MIT License 5 votes vote down vote up
def test_cross_val_generator_with_indices():
    # With indices=True (deprecated to pass explicitly), every CV generator
    # should yield integer index arrays (not boolean masks) for BOTH splits.
    X = np.array([[1, 2], [3, 4], [5, 6], [7, 8]])
    y = np.array([1, 1, 2, 2])
    labels = np.array([1, 2, 3, 4])
    # explicitly passing indices value is deprecated
    loo = assert_warns(DeprecationWarning, cval.LeaveOneOut,
                       4, indices=True)
    lpo = assert_warns(DeprecationWarning, cval.LeavePOut,
                       4, 2, indices=True)
    kf = assert_warns(DeprecationWarning, cval.KFold,
                      4, 2, indices=True)
    skf = assert_warns(DeprecationWarning, cval.StratifiedKFold,
                       y, 2, indices=True)
    lolo = assert_warns(DeprecationWarning, cval.LeaveOneLabelOut,
                        labels, indices=True)
    lopo = assert_warns(DeprecationWarning, cval.LeavePLabelOut,
                        labels, 2, indices=True)
    ps = assert_warns(DeprecationWarning, cval.PredefinedSplit,
                      [1, 1, 2, 2], indices=True)
    # Bootstrap as a cross-validation is deprecated
    b = assert_warns(DeprecationWarning, cval.Bootstrap, 2)
    ss = assert_warns(DeprecationWarning, cval.ShuffleSplit,
                      2, indices=True)
    for cv in [loo, lpo, kf, skf, lolo, lopo, b, ss, ps]:
        for train, test in cv:
            assert_not_equal(np.asarray(train).dtype.kind, 'b')
            # Bug fix: the second assertion duplicated the `train` check;
            # it must verify the `test` split as well.
            assert_not_equal(np.asarray(test).dtype.kind, 'b')
            X[train], X[test]
            y[train], y[test]
Example 21
Project: linear_neuron   Author: uglyboxer   File: test_cross_validation.py    MIT License 5 votes vote down vote up
def test_cross_val_generator_mask_indices_same():
    # The CV generators must produce identical splits whether indices=True
    # (integer arrays) or indices=False (boolean masks).
    y = np.array([0, 0, 0, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2])
    labels = np.array([1, 1, 2, 3, 3, 3, 4])

    generator_pairs = [
        (cval.LeaveOneOut(5, indices=False),
         cval.LeaveOneOut(5, indices=True)),
        (cval.LeavePOut(10, 2, indices=False),
         cval.LeavePOut(10, 2, indices=True)),
        (cval.KFold(10, 5, indices=False, shuffle=True, random_state=1),
         cval.KFold(10, 5, indices=True, shuffle=True, random_state=1)),
        (cval.StratifiedKFold(y, 3, indices=False),
         cval.StratifiedKFold(y, 3, indices=True)),
        (cval.LeaveOneLabelOut(labels, indices=False),
         cval.LeaveOneLabelOut(labels, indices=True)),
        (cval.LeavePLabelOut(labels, 2, indices=False),
         cval.LeavePLabelOut(labels, 2, indices=True)),
        (cval.PredefinedSplit([1, 1, 2, 2], indices=False),
         cval.PredefinedSplit([1, 1, 2, 2], indices=True)),
    ]

    for cv_mask, cv_ind in generator_pairs:
        for (train_mask, test_mask), (train_ind, test_ind) in \
                zip(cv_mask, cv_ind):
            assert_array_equal(np.where(train_mask)[0], train_ind)
            assert_array_equal(np.where(test_mask)[0], test_ind)
Example 22
Project: Weiss   Author: WangWenjun559   File: test_cross_validation.py    Apache License 2.0 5 votes vote down vote up
def test_cross_val_predict():
    # cross_val_predict should match a hand-rolled KFold loop, work with
    # LeaveOneOut, sparse inputs, and unsupervised estimators, and reject
    # CV iterators whose test folds are out of range.
    boston = load_boston()
    X, y = boston.data, boston.target
    cv = cval.KFold(len(boston.target))

    est = Ridge()

    # Naive loop (should be same as cross_val_predict):
    expected = np.zeros_like(y)
    for train, test in cv:
        est.fit(X[train], y[train])
        expected[test] = est.predict(X[test])

    preds = cval.cross_val_predict(est, X, y, cv=cv)
    assert_array_almost_equal(preds, expected)

    preds = cval.cross_val_predict(est, X, y)
    assert_equal(len(preds), len(y))

    cv = cval.LeaveOneOut(len(y))
    preds = cval.cross_val_predict(est, X, y, cv=cv)
    assert_equal(len(preds), len(y))

    # Sparsify X (zero out everything below the median) and retry.
    Xsp = X.copy()
    Xsp *= (Xsp > np.median(Xsp))
    Xsp = coo_matrix(Xsp)
    preds = cval.cross_val_predict(est, Xsp, y)
    assert_array_almost_equal(len(preds), len(y))

    preds = cval.cross_val_predict(KMeans(), X)
    assert_equal(len(preds), len(y))

    def bad_cv():
        for i in range(4):
            yield np.array([0, 1, 2, 3]), np.array([4, 5, 6, 7, 8])

    assert_raises(ValueError, cval.cross_val_predict, est, X, y, cv=bad_cv())
Example 23
Project: wine-ml-on-aws-lambda   Author: pierreant   File: test_cross_validation.py    Apache License 2.0 5 votes vote down vote up
def test_cross_val_predict():
    # Verify cross_val_predict against a manual fold loop, then exercise the
    # LeaveOneOut, sparse-matrix, unsupervised, and invalid-CV paths.
    dataset = load_boston()
    X, y = dataset.data, dataset.target
    kfold = cval.KFold(len(dataset.target))

    est = Ridge()

    # Manual per-fold predictions; cross_val_predict must reproduce these.
    manual_preds = np.zeros_like(y)
    for train, test in kfold:
        est.fit(X[train], y[train])
        manual_preds[test] = est.predict(X[test])

    preds = cval.cross_val_predict(est, X, y, cv=kfold)
    assert_array_almost_equal(preds, manual_preds)

    preds = cval.cross_val_predict(est, X, y)
    assert_equal(len(preds), len(y))

    preds = cval.cross_val_predict(est, X, y, cv=cval.LeaveOneOut(len(y)))
    assert_equal(len(preds), len(y))

    # Sparse input: keep only values above the median, as a COO matrix.
    Xsp = X.copy()
    Xsp *= (Xsp > np.median(Xsp))
    Xsp = coo_matrix(Xsp)
    preds = cval.cross_val_predict(est, Xsp, y)
    assert_array_almost_equal(len(preds), len(y))

    preds = cval.cross_val_predict(KMeans(), X)
    assert_equal(len(preds), len(y))

    def bad_cv():
        for i in range(4):
            yield np.array([0, 1, 2, 3]), np.array([4, 5, 6, 7, 8])

    assert_raises(ValueError, cval.cross_val_predict, est, X, y, cv=bad_cv())
Example 24
Project: jamespy_py3   Author: jskDr   File: jmimo.py    MIT License 5 votes vote down vote up
def _cv_pilot_reg_only_r0(self, alpha = 0):
		"""Leave-one-out cross-validation of pilot-based channel regression.

		alpha == 0 selects plain LinearRegression; otherwise the estimator
		named by self.model is instantiated from sklearn's linear_model
		with this alpha. Returns the raw per-sample LOO scores.
		"""
		model = self.model
		yT_a = self.rx_p["yT_a"]
		x_a = self.rx_p["x_a"]

		# kf = KFold() 
		# loo = cross_validation.LeaveOneOut( x_a.shape[0])
		if alpha == 0:
			lm = linear_model.LinearRegression()
		else:
			lm = getattr( linear_model, model)(alpha)
		# Per-sample leave-one-out residuals from the shared helper.
		scores = codes.cross_val_score_loo( lm, yT_a, x_a)

		return scores 
Example 25
Project: jamespy_py3   Author: jskDr   File: jmimo.py    MIT License 5 votes vote down vote up
def _cv_pilot_reg_r0(self, alpha = 0):

		"""
		Cross-validatin scores are evaluated using LOO. 
		SNRpilot is equal to SNR, which is SNRdata.		
		"""
		Npilot = self.Npilot
		SNRpilot = self.SNR
		model = self.model

		# Generate a fresh BPSK pilot sequence for Nt transmit antennas.
		BPSK, s_a, x_flat_a, x_a = gen_BPSK( Npilot, self.Nt)
		# H_a = gen_H( self.Nr, self.Nt)
		# H_a = self.H_a
		# Simulate the received pilot signal through channel self.H_a at SNRpilot.
		y_a = gen_Rx( self.Nr, Npilot, SNRpilot, self.H_a, x_a)

		yT_a = y_a.T

		# kf = KFold() 
		# loo = cross_validation.LeaveOneOut( x_a.shape[0])
		# alpha == 0 -> plain least squares; otherwise the named regularized model.
		if alpha == 0:
			lm = linear_model.LinearRegression()
		else:
			lm = getattr( linear_model, model)(alpha)
		# Per-sample leave-one-out residuals from the shared helper.
		scores = codes.cross_val_score_loo( lm, yT_a, x_a)

		return scores 
Example 26
Project: MI-POGUE   Author: twytock   File: figure-1.py    GNU General Public License v3.0 5 votes vote down vote up
def cv_loo(Z1,gexp,n_neighbors=8):
    """Leave-one-out cross-validation of the growth-rate mapping.

    For each held-out sample, fit the mapping on the remaining samples and
    predict the held-out growth rate. Returns a DataFrame with 'Predicted'
    and 'Actual' columns, one row per sample.
    """
    from sklearn.cross_validation import LeaveOneOut
    fold_frames = []
    for train_ix, test_ix in LeaveOneOut(gexp.shape[0]):
        clf = create_growth_rate_mapping(gexp.iloc[train_ix], Z1.iloc[train_ix],
                                         n_neighbors=n_neighbors)
        held_out_gexp = gexp.iloc[test_ix]
        held_out_gr = Z1.iloc[test_ix]
        pred = clf.predict(held_out_gexp)
        fold_frames.append(pa.DataFrame(
            {'Predicted': pa.Series(pred, index=held_out_gexp.index),
             'Actual': held_out_gr}))
    return pa.concat(fold_frames, axis=0)
Example 27
Project: chicago-crime   Author: thekingofkings   File: multi_view_prediction.py    MIT License 4 votes vote down vote up
def test_view_model_independently(self):
        Y, D, P, T, G = generate_raw_samples()
        loo = LeaveOneOut(len(Y))
        T = sm.add_constant(T, prepend=False)
        P = sm.add_constant(P, prepend=False)
        D = sm.add_constant(D, prepend=False)
        G = sm.add_constant(G, prepend=False)
        ter = []
        per = []
        der = []
        ger = []
        for train_idx, test_idx in loo:
            nbm, yp = taxi_view_model(train_idx, Y, T)
            ybar = nbm.predict(T[test_idx])
            ter.append(ybar - Y[test_idx])
            
            nbm, yp = poi_view_model(train_idx, Y, P)
            ybar = nbm.predict(P[test_idx])
            per.append(ybar - Y[test_idx])
            
            nbm, yp = demo_view_model(train_idx, Y, D)
            ybar = nbm.predict(D[test_idx])
            der.append(ybar - Y[test_idx])
            
            nbm, yp = demo_view_model(train_idx, Y, G)
            ybar = nbm.predict(G[test_idx])
            ger.append(ybar - Y[test_idx])
            
        tmre = np.mean(np.abs(ter)) / np.mean(Y)
        print "Taxi MRE: {0}".format(tmre)
        assert( tmre < 0.5 )
#        self.visualize_prediction_error(ter, Y, "Taxi view")
        
        pmre = np.mean(np.abs(per)) / np.mean(Y)
        print "POI MRE: {0}".format(pmre)
        assert( pmre < 0.8 )
#        self.visualize_prediction_error(per, Y, "POI view")
        
        dmre = np.mean(np.abs(der)) / np.mean(Y)
        print "Demo MRE: {0}".format(dmre)
        assert( dmre < 0.8 )
#        self.visualize_prediction_error(der, Y, "Demo view")
        
        gmre = np.mean(np.abs(ger)) / np.mean(Y)
        print "Geo MRE: {0}".format(gmre)
        assert( gmre < 0.5 )
#        self.visualize_prediction_error(ger, Y, "Geo view")