Python sklearn.model_selection.GridSearchCV() Examples

The following are 30 code examples showing how to use sklearn.model_selection.GridSearchCV(). The examples are extracted from open source projects; the originating project, author, source file, and license are noted above each example where available.

You may also want to check out the other functions and classes available in the sklearn.model_selection module.
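
Before going through the project examples below, here is a minimal, self-contained sketch (not taken from any of the listed projects) of the typical GridSearchCV workflow: define a parameter grid, fit the search on training data, and read off the best parameters and score.

from sklearn.datasets import load_iris
from sklearn.model_selection import GridSearchCV, train_test_split
from sklearn.svm import SVC

X, y = load_iris(return_X_y=True)
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)

# every combination in the grid below is evaluated with 5-fold cross-validation
param_grid = {'kernel': ('linear', 'rbf'), 'C': [1, 10, 100]}
search = GridSearchCV(SVC(), param_grid, cv=5)
search.fit(X_train, y_train)

print(search.best_params_, search.best_score_)
# with the default refit=True, the fitted search object acts as the best estimator
print(search.score(X_test, y_test))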

Example 1
Project: text-classifier   Author: shibing624   File: grid_search_cv.py    License: Apache License 2.0
def search_cv(x_train, y_train, x_test, y_test, model=SVC()):
    # use grid search to find the best parameters
    # (the parameter grid below targets SVC hyperparameters: kernel, C and gamma)
    parameters = {'kernel': ('linear', 'rbf'), 'C': [1, 2, 4], 'gamma': [0.125, 0.25, 0.5, 1, 2, 4]}
    clf = GridSearchCV(model, param_grid=parameters)
    grid_search = clf.fit(x_train, y_train)
    # score the results
    print("Best score: %0.3f" % grid_search.best_score_)
    print(grid_search.best_estimator_)

    # best params
    print('best params:', clf.best_params_)

    print('-----grid search end------------')
    print('on the full training set')
    scores = cross_val_score(grid_search.best_estimator_, x_train, y_train, cv=3, scoring='accuracy')
    print(scores.mean(), scores)
    print('on test set')
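    # note: cross_val_score re-fits clones of best_estimator_ on the test-set folds,
    # so this scores the selected configuration rather than the model fitted above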
    scores = cross_val_score(grid_search.best_estimator_, x_test, y_test, cv=3, scoring='accuracy')
    print(scores.mean(), scores) 
Example 2
Project: malss   Author: canard0328   File: malss.py    License: MIT License
def __tune_parameters(self):
        for i in range(len(self.algorithms)):
            if self.verbose:
                print('    %s' % self.algorithms[i].name)
            estimator = self.algorithms[i].estimator
            parameters = self.algorithms[i].parameters
            clf = GridSearchCV(
                estimator, parameters, cv=self.cv, scoring=self.scoring,
                iid=False, n_jobs=self.n_jobs)
            clf.fit(self.data.X, self.data.y)
            grid_scores = []
            for j in range(len(clf.cv_results_['mean_test_score'])):
                grid_scores.append((clf.cv_results_['params'][j],
                                    clf.cv_results_['mean_test_score'][j],
                                    clf.cv_results_['std_test_score'][j]))
            self.algorithms[i].estimator = clf.best_estimator_
            self.algorithms[i].best_score = clf.best_score_
            self.algorithms[i].best_params = clf.best_params_
            self.algorithms[i].grid_scores = grid_scores

        self.__search_best_algorithm() 
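
The loop above flattens cv_results_ into (params, mean score, score std) tuples. For reference, a minimal standalone sketch (not part of the malss project) of the same three parallel arrays exposed by cv_results_:

from sklearn.datasets import load_iris
from sklearn.model_selection import GridSearchCV
from sklearn.svm import SVC

X, y = load_iris(return_X_y=True)
search = GridSearchCV(SVC(), {'C': [0.1, 1, 10]}, cv=3)
search.fit(X, y)

# cv_results_ is a dict of parallel arrays, one entry per candidate in the grid
for params, mean, std in zip(search.cv_results_['params'],
                             search.cv_results_['mean_test_score'],
                             search.cv_results_['std_test_score']):
    print(params, round(mean, 3), '+/-', round(std, 3))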
Example 3
Project: scVI   Author: YosefLab   File: annotation.py    License: MIT License
def compute_accuracy_svc(
    data_train,
    labels_train,
    data_test,
    labels_test,
    param_grid=None,
    verbose=0,
    max_iter=-1,
):
    if param_grid is None:
        param_grid = [
            {"C": [1, 10, 100, 1000], "kernel": ["linear"]},
            {"C": [1, 10, 100, 1000], "gamma": [0.001, 0.0001], "kernel": ["rbf"]},
        ]
    svc = SVC(max_iter=max_iter)
    clf = GridSearchCV(svc, param_grid, verbose=verbose, cv=3)
    return compute_accuracy_classifier(
        clf, data_train, labels_train, data_test, labels_test
    ) 
Example 4
Project: cwcf   Author: jaromiru   File: hpc_svm.py    License: MIT License
def get_full_rbf_svm_clf(train_x, train_y, c_range=None, gamma_range=None):
		param_grid = dict(gamma=gamma_range, C=c_range)
		cv = StratifiedShuffleSplit(n_splits=2, test_size=0.2, random_state=42)
		grid = GridSearchCV(SVC(cache_size=1024), param_grid=param_grid, cv=cv, n_jobs=14, verbose=10)
		grid.fit(train_x, train_y)
		
		print("The best parameters are %s with a score of %0.2f" % (grid.best_params_, grid.best_score_))
		
		scores = grid.cv_results_['mean_test_score'].reshape(len(c_range), len(gamma_range))
		print("Scores:")
		print(scores)
		
		print("c_range:", c_range)
		print("gamma_range:", gamma_range)

		c_best = grid.best_params_['C']
		gamma_best = grid.best_params_['gamma']

		clf = SVC(C=c_best, gamma=gamma_best, verbose=True)
		return clf

#---------------- 
Example 5
Project: skutil   Author: tgsmith61591   File: grid_search.py    License: BSD 3-Clause "New" or "Revised" License
def fit(self, X, y=None, groups=None):
            """Run fit with all sets of parameters.

            Parameters
            ----------

            X : array-like, shape=(n_samples, n_features)
                Training vector, where n_samples is the number of samples and
                n_features is the number of features.

            y : array-like, shape=(n_samples,) or (n_samples, n_output), optional (default=None)
                Target relative to X for classification or regression;
                None for unsupervised learning.

            groups : array-like, shape=(n_samples,), optional (default=None)
                Group labels for the samples used while splitting the dataset into
                train/test set.
            """
            return super(GridSearchCV, self).fit(X, _as_numpy(y), groups) 
Example 6
def paramTuning(features_train, labels_train, nfolds):
	#use the training data and the given number of folds
	#define the range of C values to search
	Cs = [0.001, 0.01, 0.1, 1, 10, 100, 1000, 10000]

	#define the range of gamma values to search
	gammas = [0.00000001, 0.00000001, 0.0000001, 0.000001, 0.00001, 0.0001, 0.001, 0.01, 0.1, 1, 10, 100, 1000]

	#build the parameter grid dictionary
	param_grid = {'C': Cs, 'gamma': gammas}

	#run the grid search over all parameter combinations defined above
	grid_search = GridSearchCV(SVC(kernel='poly'), param_grid, cv=nfolds)

	#fit the training data
	grid_search.fit(features_train, labels_train)

	#print the best parameter combination
	print(grid_search.best_params_)
Example 7
def paramTuning(features_train, labels_train, nfolds):
	#use the training data and the given number of folds
	#define the range of C values to search
	Cs = [0.001, 0.01, 0.1, 1, 10, 100, 1000, 10000]

	#define the range of gamma values to search
	gammas = [0.00000001, 0.00000001, 0.0000001, 0.000001, 0.00001, 0.0001, 0.001, 0.01, 0.1, 1, 10, 100]

	#build the parameter grid dictionary
	param_grid = {'C': Cs, 'gamma': gammas}

	#run the grid search over all parameter combinations defined above
	grid_search = GridSearchCV(SVC(kernel='rbf'), param_grid, cv=nfolds)

	#fit the training data
	grid_search.fit(features_train, labels_train)

	#print the best parameter combination
	print(grid_search.best_params_)
Example 8
def paramTuning(features_train, labels_train, nfolds):
	#use the training data and the given number of folds
	#define the range of C values to search
	Cs = [0.001, 0.01, 0.1, 1, 10, 100, 1000, 10000]

	#define the range of gamma values to search
	gammas = [0.00000001, 0.00000001, 0.0000001, 0.000001, 0.00001, 0.0001, 0.001, 0.01, 0.1, 1, 10, 100, 1000]

	#build the parameter grid dictionary
	param_grid = {'C': Cs, 'gamma': gammas}

	#run the grid search over all parameter combinations defined above
	grid_search = GridSearchCV(SVC(kernel='poly'), param_grid, cv=nfolds)

	#fit the training data
	grid_search.fit(features_train, labels_train)

	#print the best parameter combination
	print(grid_search.best_params_)
Example 9
def paramTuning(features_train, labels_train, nfolds):
	#use the training data and the given number of folds
	#define the range of C values to search
	Cs = [1, 10, 100, 1000, 10000]

	#define the range of gamma values to search
	gammas = [0.00000001, 0.00000001, 0.0000001, 0.000001, 0.00001]

	#build the parameter grid dictionary
	param_grid = {'C': Cs, 'gamma': gammas}

	#run the grid search over all parameter combinations defined above
	grid_search = GridSearchCV(SVC(kernel='rbf'), param_grid, cv=nfolds)

	#fit the training data
	grid_search.fit(features_train, labels_train)

	#return the best parameter combination
	return grid_search.best_params_
Example 10
Project: Speech_Signal_Processing_and_Classification   Author: gionanide   File: svm_multiclass.py    License: MIT License
def paramTuning(features_train, labels_train, nfolds):
	#use the training data and the given number of folds
	#define the range of C values to search
	Cs = [1, 10, 100, 1000, 10000]

	#define the range of gamma values to search
	gammas = [0.00000001, 0.00000001, 0.0000001, 0.000001, 0.00001]

	#build the parameter grid dictionary
	param_grid = {'C': Cs, 'gamma': gammas}

	#run the grid search over all parameter combinations defined above
	grid_search = GridSearchCV(SVC(kernel='rbf'), param_grid, cv=nfolds)

	#fit the training data
	grid_search.fit(features_train, labels_train)

	#print the best parameter combination
	print(grid_search.best_params_)
Example 11
def paramTuning(features_train, labels_train, nfolds):
	#use the training data and the given number of folds
	#define the range of C values to search
	Cs = [1000, 10000, 10000, 1000000]

	#define the range of gamma values to search
	gammas = [0.00000001, 0.00000001, 0.0000001, 0.000001, 0.00001]

	#build the parameter grid dictionary
	param_grid = {'C': Cs, 'gamma': gammas}

	#run the grid search over all parameter combinations defined above
	grid_search = GridSearchCV(SVC(kernel='rbf'), param_grid, cv=nfolds)

	#fit the training data
	grid_search.fit(features_train, labels_train)

	#return the best parameter combination
	return grid_search.best_params_
Example 12
Project: Mastering-Elasticsearch-7.0   Author: PacktPublishing   File: test_weight_boosting.py    License: MIT License
def test_gridsearch():
    # Check that base trees can be grid-searched.
    # AdaBoost classification
    boost = AdaBoostClassifier(base_estimator=DecisionTreeClassifier())
    parameters = {'n_estimators': (1, 2),
                  'base_estimator__max_depth': (1, 2),
                  'algorithm': ('SAMME', 'SAMME.R')}
    clf = GridSearchCV(boost, parameters)
    clf.fit(iris.data, iris.target)

    # AdaBoost regression
    boost = AdaBoostRegressor(base_estimator=DecisionTreeRegressor(),
                              random_state=0)
    parameters = {'n_estimators': (1, 2),
                  'base_estimator__max_depth': (1, 2)}
    clf = GridSearchCV(boost, parameters)
    clf.fit(boston.data, boston.target) 
Example 13
Project: Mastering-Elasticsearch-7.0   Author: PacktPublishing   File: test_score_objects.py    License: MIT License
def test_check_scoring_gridsearchcv():
    # test that check_scoring works on GridSearchCV and pipeline.
    # slightly redundant non-regression test.

    grid = GridSearchCV(LinearSVC(), param_grid={'C': [.1, 1]})
    scorer = check_scoring(grid, "f1")
    assert isinstance(scorer, _PredictScorer)

    pipe = make_pipeline(LinearSVC())
    scorer = check_scoring(pipe, "f1")
    assert isinstance(scorer, _PredictScorer)

    # check that cross_val_score definitely calls the scorer
    # and doesn't make any assumptions about the estimator apart from having a
    # fit.
    scores = cross_val_score(EstimatorWithFit(), [[1], [2], [3]], [1, 0, 1],
                             scoring=DummyScorer())
    assert_array_equal(scores, 1) 
Example 14
Project: Mastering-Elasticsearch-7.0   Author: PacktPublishing   File: test_impute.py    License: MIT License
def test_imputation_pipeline_grid_search():
    # Test imputation within a pipeline + gridsearch.
    X = sparse_random_matrix(100, 100, density=0.10)
    missing_values = X.data[0]

    pipeline = Pipeline([('imputer',
                          SimpleImputer(missing_values=missing_values)),
                         ('tree',
                          tree.DecisionTreeRegressor(random_state=0))])

    parameters = {
        'imputer__strategy': ["mean", "median", "most_frequent"]
    }

    Y = sparse_random_matrix(100, 1, density=0.10).toarray()
    gs = GridSearchCV(pipeline, parameters)
    gs.fit(X, Y) 
Example 15
Project: Mastering-Elasticsearch-7.0   Author: PacktPublishing   File: test_base.py    License: MIT License
def test_set_params_passes_all_parameters():
    # Make sure all parameters are passed together to set_params
    # of nested estimator. Regression test for #9944

    class TestDecisionTree(DecisionTreeClassifier):
        def set_params(self, **kwargs):
            super().set_params(**kwargs)
            # expected_kwargs is in test scope
            assert kwargs == expected_kwargs
            return self

    expected_kwargs = {'max_depth': 5, 'min_samples_leaf': 2}
    for est in [Pipeline([('estimator', TestDecisionTree())]),
                GridSearchCV(TestDecisionTree(), {})]:
        est.set_params(estimator__max_depth=5,
                       estimator__min_samples_leaf=2) 
Example 16
Project: Mastering-Elasticsearch-7.0   Author: PacktPublishing   File: test_ridge.py    License: MIT License
def test_ridgecv_sample_weight():
    rng = np.random.RandomState(0)
    alphas = (0.1, 1.0, 10.0)

    # There are different algorithms for n_samples > n_features
    # and the opposite, so test them both.
    for n_samples, n_features in ((6, 5), (5, 10)):
        y = rng.randn(n_samples)
        X = rng.randn(n_samples, n_features)
        sample_weight = 1.0 + rng.rand(n_samples)

        cv = KFold(5)
        ridgecv = RidgeCV(alphas=alphas, cv=cv)
        ridgecv.fit(X, y, sample_weight=sample_weight)

        # Check using GridSearchCV directly
        parameters = {'alpha': alphas}
        gs = GridSearchCV(Ridge(), parameters, cv=cv)
        gs.fit(X, y, sample_weight=sample_weight)

        assert ridgecv.alpha_ == gs.best_estimator_.alpha
        assert_array_almost_equal(ridgecv.coef_, gs.best_estimator_.coef_) 
Example 17
Project: Mastering-Elasticsearch-7.0   Author: PacktPublishing   File: test_logistic.py    License: MIT License
def test_elastic_net_vs_l1_l2(C):
    # Make sure that elasticnet with grid search on l1_ratio gives same or
    # better results than just l1 or just l2.

    X, y = make_classification(500, random_state=0)
    X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)

    param_grid = {'l1_ratio': np.linspace(0, 1, 5)}

    enet_clf = LogisticRegression(penalty='elasticnet', C=C, solver='saga',
                                  random_state=0)
    gs = GridSearchCV(enet_clf, param_grid, cv=5, iid=False, refit=True)

    l1_clf = LogisticRegression(penalty='l1', C=C, solver='saga',
                                random_state=0)
    l2_clf = LogisticRegression(penalty='l2', C=C, solver='saga',
                                random_state=0)

    for clf in (gs, l1_clf, l2_clf):
        clf.fit(X_train, y_train)

    assert gs.score(X_test, y_test) >= l1_clf.score(X_test, y_test)
    assert gs.score(X_test, y_test) >= l2_clf.score(X_test, y_test) 
Example 18
Project: Mastering-Elasticsearch-7.0   Author: PacktPublishing   File: test_kernel_pca.py    License: MIT License
def test_gridsearch_pipeline_precomputed():
    # Test if we can do a grid-search to find parameters to separate
    # circles with a perceptron model using a precomputed kernel.
    X, y = make_circles(n_samples=400, factor=.3, noise=.05,
                        random_state=0)
    kpca = KernelPCA(kernel="precomputed", n_components=2)
    pipeline = Pipeline([("kernel_pca", kpca),
                         ("Perceptron", Perceptron(max_iter=5))])
    param_grid = dict(Perceptron__max_iter=np.arange(1, 5))
    grid_search = GridSearchCV(pipeline, cv=3, param_grid=param_grid)
    X_kernel = rbf_kernel(X, gamma=2.)
    grid_search.fit(X_kernel, y)
    assert_equal(grid_search.best_score_, 1)
Example 19
Project: ml-parameter-optimization   Author: arnaudvl   File: ml_tune.py    License: MIT License
def apply_gridsearch(self,model):
        """
        apply grid search to the specified parameters of the ML algorithm;
        returns the updated best score and parameters
        """
        # check if a custom evaluation function is specified
        if callable(self.params_cv['scoring']):
            scoring = make_scorer(self.params_cv['scoring'],greater_is_better=self._greater_is_better)
        else:
            scoring = self.params_cv['scoring']
        
        gsearch = GridSearchCV(estimator=model,param_grid=self.get_params_tune(),scoring=scoring,
                               iid=self.params_cv['iid'],cv=self.params_cv['cv_folds'],n_jobs=self.params_cv['n_jobs'])
        gsearch.fit(self.X,self.y)
        
        # update best model if best_score is improved
        if (gsearch.best_score_ * self._score_mult) > (self.best_score * self._score_mult):
            self.best_model = clone(gsearch.best_estimator_)
            self.best_score = gsearch.best_score_
        
        # update tuned parameters with optimal values
        for key,value in gsearch.best_params_.items():
            self._params[key] = value
        self._temp_score = gsearch.best_score_
        return self 
Example 20
Project: pylift   Author: wayfair   File: base.py    License: BSD 2-Clause "Simplified" License
def grid_search(self, **kwargs):
        """Grid search using sklearn.model_selection.GridSearchCV.

        Any parameters typically associated with GridSearchCV (see
        sklearn documentation) can be passed as keyword arguments to this
        function.

        The final dictionary used for the grid search is saved to
        `self.grid_search_params`. This is updated with any parameters that are
        passed.

        Examples
        --------
        # Passing kwargs.
        self.grid_search(param_grid={'max_depth':[2,3,5,10]}, refit=True)

        """
        self.grid_search_params.update(kwargs)
        self.grid_search_ = GridSearchCV(**self.grid_search_params)
        self.grid_search_.fit(self.x_train, self.transformed_y_train)
        return self.grid_search_ 
Example 21
Project: pylift   Author: df-foundation   File: base.py    License: BSD 2-Clause "Simplified" License
def grid_search(self, **kwargs):
        """Grid search using sklearn.model_selection.GridSearchCV.

        Any parameters typically associated with GridSearchCV (see
        sklearn documentation) can be passed as keyword arguments to this
        function.

        The final dictionary used for the grid search is saved to
        `self.grid_search_params`. This is updated with any parameters that are
        passed.

        Examples
        --------
        # Passing kwargs.
        self.grid_search(param_grid={'max_depth':[2,3,5,10]}, refit=True)

        """
        self.grid_search_params.update(kwargs)
        self.grid_search_ = GridSearchCV(**self.grid_search_params)
        self.grid_search_.fit(self.x_train, self.transformed_y_train)
        return self.grid_search_ 
Example 22
Project: TextSentimentClassification   Author: wslc1314   File: LR.py    License: MIT License
def train(self,
              trainPath=general_config.data_dir+"/training_label_new.txt",
              num_cv=5):
        indices, sentences, labels=readNewFile(file=trainPath,
                                               vocab2intPath=general_config.global_static_v2i_path)
        sentences_=[]
        for sentence in sentences:
            sentences_.append(self.embeddings[sentence].mean(axis=0))
        parameters = {'C': [0.001, 0.01, 0.1, 1, 10, 100]}  # Inverse of regularization strength
        self.model = GridSearchCV(self.model, parameters, cv=num_cv, refit=True)
        self.model.fit(X=sentences_,y=labels)
        self.logger.info(self.model.cv_results_)
        self.logger.info(self.model.get_params())
        self.logger.info("Training Accuracy: %s"%self.model.score(X=sentences_,y=labels))
        save_path = self.save_dir + "/model.pkl"
        joblib.dump(self.model, save_path) 
Example 23
Project: fireTS   Author: jxx123   File: core.py    License: MIT License
def grid_search(self, X, y, para_grid, **params):
        """
        Perform grid search on the base_estimator. The function first generates
        the lag features and prediction targets, and then calls
        ``GridSearchCV`` from the scikit-learn package.

        :param array-like X: exogenous input time series, shape = (n_samples,
                             n_exog_inputs)
        :param array-like y: target time series to predict, shape = (n_samples)
        :param dict para_grid: parameter grid, in the same format as
                               ``param_grid`` in ``GridSearchCV``.
        :param dict params: other keyword arguments that are passed through to
                            ``GridSearchCV``.
        """
        grid = GridSearchCV(self.base_estimator, para_grid, **params)
        X, y = self._check_and_preprocess_X_y(X, y)
        features, target = self._preprocess_data(X, y)
        grid.fit(features, target)
        self.set_params(**grid.best_params_) 
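
A hedged usage sketch for the method above, assuming fireTS's NARX wrapper with a RandomForestRegressor base estimator; the data, lag orders, and grid below are illustrative, not taken from the fireTS project:

import numpy as np
from sklearn.ensemble import RandomForestRegressor
from fireTS.models import NARX  # assumed import path for the NARX wrapper

x = np.random.randn(200, 1)   # one exogenous input series
y = np.random.randn(200)      # target series

mdl = NARX(RandomForestRegressor(), auto_order=2, exog_order=[2], exog_delay=[1])
# para_grid uses the base estimator's parameter names; extra kwargs (here cv) go to GridSearchCV
mdl.grid_search(x, y, para_grid={'n_estimators': [10, 30]}, cv=3)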
Example 24
Project: skorch   Author: skorch-dev   File: test_helper.py    License: BSD 3-Clause "New" or "Revised" License
def test_grid_search_with_dict_works(
            self, sldict_cls, data, classifier_module):
        from sklearn.model_selection import GridSearchCV
        from skorch import NeuralNetClassifier

        net = NeuralNetClassifier(classifier_module)
        X, y = data
        X = sldict_cls(X=X)
        params = {
            'lr': [0.01, 0.02],
            'max_epochs': [10, 20],
        }
        gs = GridSearchCV(net, params, refit=True, cv=3, scoring='accuracy',
                          iid=True)
        gs.fit(X, y)
        print(gs.best_score_, gs.best_params_) 
Example 25
Project: skorch   Author: skorch-dev   File: test_helper.py    License: BSD 3-Clause "New" or "Revised" License
def test_grid_search_with_slds_works(
            self, slds, y, classifier_module):
        from sklearn.model_selection import GridSearchCV
        from skorch import NeuralNetClassifier

        net = NeuralNetClassifier(
            classifier_module,
            train_split=False,
            verbose=0,
        )
        params = {
            'lr': [0.01, 0.02],
            'max_epochs': [10, 20],
        }
        gs = GridSearchCV(net, params, refit=False, cv=3, scoring='accuracy', iid=True)
        gs.fit(slds, y)  # does not raise 
Example 26
Project: skorch   Author: skorch-dev   File: test_helper.py    License: BSD 3-Clause "New" or "Revised" License
def test_grid_search_with_slds_X_and_slds_y(
            self, slds, slds_y, classifier_module):
        from sklearn.model_selection import GridSearchCV
        from skorch import NeuralNetClassifier

        net = NeuralNetClassifier(
            classifier_module,
            train_split=False,
            verbose=0,
        )
        params = {
            'lr': [0.01, 0.02],
            'max_epochs': [10, 20],
        }
        gs = GridSearchCV(net, params, refit=False, cv=3, scoring='accuracy', iid=True)
        gs.fit(slds, slds_y)  # does not raise 
Example 27
Project: pyglmnet   Author: glm-tools   File: test_pyglmnet.py    License: MIT License
def test_cv():
    """Simple CV check."""
    # XXX: don't use scikit-learn for tests.
    X, y = make_regression()
    cv = KFold(n_splits=5)

    glm_normal = GLM(distr='gaussian', alpha=0.01, reg_lambda=0.1)
    # check that it returns 5 scores
    scores = cross_val_score(glm_normal, X, y, cv=cv)
    assert(len(scores) == 5)

    param_grid = [{'alpha': np.linspace(0.01, 0.99, 2)},
                  {'reg_lambda': np.logspace(np.log(0.5), np.log(0.01),
                                             10, base=np.exp(1))}]
    glmcv = GridSearchCV(glm_normal, param_grid, cv=cv)
    glmcv.fit(X, y) 
Example 28
Project: scikit-gstat   Author: mmaelicke   File: interfaces.py    License: MIT License
def test_find_best_model(self):
        """
        Use GridSearchCV to find the best model for the given data
        which should be the spherical model
        """
        parameters = dict(
            model=('spherical', 'gaussian', 'exponential', 'matern')
        )
        gs = GridSearchCV(
            VariogramEstimator(n_lags=15, normalize=False), 
            parameters,
            cv=3
        )

        gs = gs.fit(self.c, self.v)

        self.assertEqual(gs.best_params_['model'], 'spherical') 
Example 29
Project: scikit-gstat   Author: mmaelicke   File: interfaces.py    License: MIT License
def test_find_best_model_future_cv(self):
        """
        cv parameter will change to 5 in sklearn 0.22
        This will change the result, though
        """
        parameters = dict(
            model=('spherical', 'gaussian', 'exponential', 'matern')
        )
        gs = GridSearchCV(
            VariogramEstimator(n_lags=15, normalize=False), 
            parameters,
            cv=5
        )

        gs = gs.fit(self.c, self.v)

        self.assertEqual(gs.best_params_['model'], 'matern') 
Example 30
Project: kaggle-code   Author: CNuge   File: comments_xgb_final.py    License: MIT License
def optimal_params(xgb_model, x_vals, y_vals, xgb_param_grid):
	""" take a model, predictor matrix and paramater grid and
		return the optimal paramater set """
	_gsearch = GridSearchCV(xgb_model,  xgb_param_grid, 
								scoring='roc_auc', 
								n_jobs=4, 
								iid=False, 
								cv=3)
	_gsearch.fit(x_vals, y_vals)

	return _gsearch.best_params_
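
A hedged usage sketch for the helper above; the classifier, data, and grid are illustrative and not part of the original project:

from sklearn.datasets import make_classification
from xgboost import XGBClassifier

x_vals, y_vals = make_classification(n_samples=200, random_state=0)
xgb_param_grid = {'max_depth': [3, 5], 'learning_rate': [0.05, 0.1]}

# note: the helper passes iid=False, which targets older scikit-learn releases
# (the iid argument was removed in scikit-learn 0.24)
best = optimal_params(XGBClassifier(), x_vals, y_vals, xgb_param_grid)
print(best)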