Python sklearn.linear_model.SGDRegressor() Examples

The following are 30 code examples for showing how to use sklearn.linear_model.SGDRegressor(). These examples are extracted from open source projects. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example.

You may check out the related API usage on the sidebar.

You may also want to check out all available functions/classes of the module sklearn.linear_model, or try the search function.

Example 1
Project: Mastering-Elasticsearch-7.0   Author: PacktPublishing   File: test_multioutput.py    License: MIT License 8 votes vote down vote up
def test_multi_target_regression_partial_fit():
    """Incremental multi-output fitting must match per-target SGD baselines."""
    X, y = datasets.make_regression(n_targets=3)
    X_train, y_train = X[:50], y[:50]
    X_test, y_test = X[50:], y[50:]

    split = 25
    expected = np.zeros_like(y_test)
    for target in range(3):
        baseline = SGDRegressor(random_state=0, max_iter=5)
        baseline.partial_fit(X_train[:split], y_train[:split, target])
        baseline.partial_fit(X_train[split:], y_train[split:, target])
        expected[:, target] = baseline.predict(X_test)

    multi = MultiOutputRegressor(SGDRegressor(random_state=0, max_iter=5))
    multi.partial_fit(X_train[:split], y_train[:split])
    multi.partial_fit(X_train[split:], y_train[split:])

    assert_almost_equal(expected, multi.predict(X_test))
    # Lasso lacks partial_fit, so the wrapper must not expose one either.
    assert not hasattr(MultiOutputRegressor(Lasso), 'partial_fit') 
Example 2
Project: EDeN   Author: fabriziocosta   File: estimator.py    License: MIT License 6 votes vote down vote up
def set_params(self, r=3, d=8, nbits=16, discrete=True,
                   normalization=True, inner_normalization=True,
                   penalty='elasticnet', loss='squared_loss'):
        """Set hyperparameters and rebuild the SGD model and vectorizer.

        Returns self to allow chained calls.
        """
        self.nbits = nbits
        self.r = r
        self.d = d
        self.discrete = discrete
        self.normalization = normalization
        self.inner_normalization = inner_normalization
        # Averaged, shuffled SGD with a fixed small iteration budget.
        self.model = SGDRegressor(loss=loss, penalty=penalty,
                                  average=True, shuffle=True,
                                  max_iter=5, tol=None)
        self.vectorizer = Vectorizer(r=self.r, d=self.d,
                                     normalization=self.normalization,
                                     inner_normalization=self.inner_normalization,
                                     discrete=self.discrete,
                                     nbits=self.nbits)
        return self 
Example 3
Project: scikit-learn-extra   Author: scikit-learn-contrib   File: test_robust_weighted_estimator.py    License: BSD 3-Clause "New" or "Revised" License 6 votes vote down vote up
def test_not_robust_regression(loss, weighting):
    """With k=0, huge c, no burn-in, the robust estimator should track plain SGD."""
    robust = RobustWeightedEstimator(
        SGDRegressor(),
        loss=loss,
        max_iter=100,
        weighting=weighting,
        k=0,
        c=1e7,
        burn_in=0,
        random_state=rng,
    )
    plain = SGDRegressor(loss=loss, random_state=rng)
    robust.fit(X_r, y_r)
    plain.fit(X_r, y_r)
    pred_robust = robust.predict(X_r)
    pred_plain = plain.predict(X_r)

    # Relative gap between the two models must be below the plain model's
    # own relative error on the targets.
    rel_gap = np.linalg.norm(pred_robust - pred_plain) / np.linalg.norm(pred_plain)
    rel_err = np.linalg.norm(pred_robust - y_r) / np.linalg.norm(y_r)
    assert rel_gap < rel_err 
Example 4
Project: sia-cog   Author: tech-quantum   File: scikitlearn.py    License: MIT License 6 votes vote down vote up
def getModels():
    """Return the names of all supported regressors and classifiers.

    NOTE(review): "Rigid" looks like a typo for "Ridge", but the string is
    kept verbatim because other code may dispatch on it — confirm upstream.
    """
    return [
        "LinearRegression",
        "BayesianRidge",
        "ARDRegression",
        "ElasticNet",
        "HuberRegressor",
        "Lasso",
        "LassoLars",
        "Rigid",
        "SGDRegressor",
        "SVR",
        "MLPClassifier",
        "KNeighborsClassifier",
        "SVC",
        "GaussianProcessClassifier",
        "DecisionTreeClassifier",
        "RandomForestClassifier",
        "AdaBoostClassifier",
        "GaussianNB",
        "LogisticRegression",
        "QuadraticDiscriminantAnalysis",
    ] 
Example 5
Project: jh-kaggle-util   Author: jeffheaton   File: ensemble_glm.py    License: Apache License 2.0 6 votes vote down vote up
def fit_ensemble(x, y):
    """Fit and return a blending model for ensembling first-level predictions.

    Parameters
    ----------
    x : array-like of stacked first-level predictions.
    y : array-like target values.

    Returns
    -------
    The fitted blender: an elastic-net SGD logistic model for binary
    classification, otherwise a cross-validated LassoLars regressor.
    """
    fit_type = jhkaggle.jhkaggle_config['FIT_TYPE']
    # The original `if 1:` guard made the trailing LogisticRegression branch
    # unreachable dead code; it has been removed.
    if fit_type == jhkaggle.const.FIT_TYPE_BINARY_CLASSIFICATION:
        blend = SGDClassifier(loss="log", penalty="elasticnet")
    else:
        # Other linear blenders (LinearRegression, ElasticNetCV, ...) were
        # tried here; LassoLarsCV was kept.
        blend = LassoLarsCV(normalize=True)
    blend.fit(x, y)
    return blend 
Example 6
Project: sklearn-onnx   Author: onnx   File: test_sklearn_bagging_converter.py    License: MIT License 6 votes vote down vote up
def test_bagging_regressor_sgd(self):
        """ONNX conversion of a bagging ensemble of SGD regressors."""
        model, X = fit_regression_model(BaggingRegressor(SGDRegressor()))
        onnx_model = convert_sklearn(
            model,
            "bagging regressor",
            [("input", FloatTensorType([None, X.shape[1]]))],
            dtype=np.float32,
        )
        self.assertIsNotNone(onnx_model)
        dump_data_and_model(
            X,
            model,
            onnx_model,
            basename="SklearnBaggingRegressorSGD-Dec4",
            allow_failure="StrictVersion(onnxruntime.__version__)"
            "<= StrictVersion('0.2.1')",
        ) 
Example 7
Project: sklearn-onnx   Author: onnx   File: test_sklearn_glm_regressor_converter.py    License: MIT License 6 votes vote down vote up
def test_model_sgd_regressor(self):
        """ONNX conversion of a fitted SGDRegressor on float input."""
        model, X = fit_regression_model(linear_model.SGDRegressor())
        onnx_model = convert_sklearn(
            model,
            "scikit-learn SGD regression",
            [("input", FloatTensorType([None, X.shape[1]]))],
        )
        self.assertIsNotNone(onnx_model)
        dump_data_and_model(
            X,
            model,
            onnx_model,
            basename="SklearnSGDRegressor-Dec4",
            allow_failure="StrictVersion("
            "onnxruntime.__version__)"
            "<= StrictVersion('0.2.1')",
        ) 
Example 8
Project: sklearn-onnx   Author: onnx   File: test_sklearn_glm_regressor_converter.py    License: MIT License 6 votes vote down vote up
def test_model_sgd_regressor_int(self):
        """ONNX conversion of an SGDRegressor fitted on integer input."""
        model, X = fit_regression_model(linear_model.SGDRegressor(),
                                        is_int=True)
        onnx_model = convert_sklearn(
            model, "SGD regression",
            [("input", Int64TensorType([None, X.shape[1]]))])
        self.assertIsNotNone(onnx_model)
        dump_data_and_model(
            X,
            model,
            onnx_model,
            basename="SklearnSGDRegressorInt-Dec4",
            allow_failure="StrictVersion("
            "onnxruntime.__version__)"
            "<= StrictVersion('0.2.1')",
        ) 
Example 9
Project: sklearn-onnx   Author: onnx   File: test_sklearn_glm_regressor_converter.py    License: MIT License 6 votes vote down vote up
def test_model_sgd_regressor_bool(self):
        """ONNX conversion of an SGDRegressor fitted on boolean input."""
        model, X = fit_regression_model(linear_model.SGDRegressor(),
                                        is_bool=True)
        onnx_model = convert_sklearn(
            model, "SGD regression",
            [("input", BooleanTensorType([None, X.shape[1]]))])
        self.assertIsNotNone(onnx_model)
        dump_data_and_model(
            X,
            model,
            onnx_model,
            basename="SklearnSGDRegressorBool",
            allow_failure="StrictVersion("
            "onnxruntime.__version__)"
            "<= StrictVersion('0.2.1')",
        ) 
Example 10
Project: twitter-stock-recommendation   Author: alvarobartt   File: test_multioutput.py    License: MIT License 6 votes vote down vote up
def test_multi_target_regression_partial_fit():
    """Incremental multi-output fitting must match per-target SGD baselines."""
    X, y = datasets.make_regression(n_targets=3)
    X_train, y_train = X[:50], y[:50]
    X_test, y_test = X[50:], y[50:]

    references = np.zeros_like(y_test)
    half_index = 25
    for n in range(3):
        sgr = SGDRegressor(random_state=0, max_iter=5)
        sgr.partial_fit(X_train[:half_index], y_train[:half_index, n])
        sgr.partial_fit(X_train[half_index:], y_train[half_index:, n])
        references[:, n] = sgr.predict(X_test)

    sgr = MultiOutputRegressor(SGDRegressor(random_state=0, max_iter=5))

    sgr.partial_fit(X_train[:half_index], y_train[:half_index])
    sgr.partial_fit(X_train[half_index:], y_train[half_index:])

    y_pred = sgr.predict(X_test)
    assert_almost_equal(references, y_pred)
    # Plain assert replaces the deprecated nose-style assert_false helper
    # (removed from sklearn's test utilities); Lasso has no partial_fit.
    assert not hasattr(MultiOutputRegressor(Lasso), 'partial_fit') 
Example 11
Project: scikit-learn-extra   Author: scikit-learn-contrib   File: robust_weighted_estimator.py    License: BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def _estimator_type(self):
        """Mirror the estimator type of the wrapped base estimator."""
        # Fall back to a regressor's type when no base estimator is set.
        est = SGDRegressor() if self.base_estimator is None else self.base_estimator
        return est._estimator_type 
Example 12
Project: scikit-learn-extra   Author: scikit-learn-contrib   File: test_robust_weighted_estimator.py    License: BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def test_corrupted_regression(loss, weighting):
    """The robust estimator should still fit well on corrupted data."""
    robust = RobustWeightedEstimator(
        SGDRegressor(),
        loss=loss,
        max_iter=50,
        weighting=weighting,
        k=4,
        c=None,
        random_state=rng,
    )
    robust.fit(X_rc, y_rc)
    # Median absolute error is insensitive to the corrupted outliers.
    assert median_absolute_error(robust.predict(X_rc), y_rc) < 0.2 
Example 13
Project: scikit-multiflow   Author: scikit-multiflow   File: regressor_chains.py    License: BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def __init__(self, base_estimator=SGDRegressor(), order=None, random_state=None):
        """Initialize the regressor chain.

        NOTE(review): the `SGDRegressor()` default is evaluated once at import
        time, so every instance created without `base_estimator` shares the
        same estimator object — confirm this is intended.
        """
        super().__init__()
        self.base_estimator = base_estimator
        self.order = order
        self.random_state = random_state
        # Internal state; populated by __configure() and during fitting.
        self.chain = None
        self.ensemble = None
        self.L = None
        self._random_state = None   # This is the actual random_state object used internally
        self.__configure() 
Example 14
Project: Mastering-Elasticsearch-7.0   Author: PacktPublishing   File: test_multioutput.py    License: MIT License 5 votes vote down vote up
def test_multi_target_sample_weight_partial_fit():
    """Different sample weights must yield different partial_fit results."""
    # weighted regressor
    X = [[1, 2, 3], [4, 5, 6]]
    y = [[3.141, 2.718], [2.718, 3.141]]
    w = [2., 1.]
    rgr_w = MultiOutputRegressor(SGDRegressor(random_state=0, max_iter=5))
    rgr_w.partial_fit(X, y, w)

    # weighted with different weights
    w = [2., 2.]
    rgr = MultiOutputRegressor(SGDRegressor(random_state=0, max_iter=5))
    rgr.partial_fit(X, y, w)

    # Plain assert replaces the deprecated nose-style assert_not_equal helper
    # (removed from sklearn's test utilities).
    assert rgr.predict(X)[0][0] != rgr_w.predict(X)[0][0] 
Example 15
Project: Mastering-Elasticsearch-7.0   Author: PacktPublishing   File: test_sgd.py    License: MIT License 5 votes vote down vote up
def fit(self, X, y, *args, **kw):
        """Fit after converting X to CSR sparse format."""
        sparse_X = sp.csr_matrix(X)
        return linear_model.SGDRegressor.fit(self, sparse_X, y, *args, **kw) 
Example 16
Project: Mastering-Elasticsearch-7.0   Author: PacktPublishing   File: test_sgd.py    License: MIT License 5 votes vote down vote up
def partial_fit(self, X, y, *args, **kw):
        """Incrementally fit after converting X to CSR sparse format."""
        sparse_X = sp.csr_matrix(X)
        return linear_model.SGDRegressor.partial_fit(self, sparse_X, y,
                                                     *args, **kw) 
Example 17
Project: Mastering-Elasticsearch-7.0   Author: PacktPublishing   File: test_sgd.py    License: MIT License 5 votes vote down vote up
def decision_function(self, X, *args, **kw):
        """Score samples after converting X to CSR sparse format."""
        sparse_X = sp.csr_matrix(X)
        return linear_model.SGDRegressor.decision_function(self, sparse_X,
                                                           *args, **kw) 
Example 18
Project: thingflow-python   Author: mpi-sws-rse   File: kalman_model.py    License: Apache License 2.0 5 votes vote down vote up
def __init__(self):
        """Create an OutputThing with train/observe/predict ports and an
        untrained SGD regressor stored on ``self.clf``."""
        OutputThing.__init__(self, ports=['train', 'observe', 'predict'])
        self.clf = linear_model.SGDRegressor() 
Example 19
Project: mercari-solution   Author: pjankiewicz   File: transformers.py    License: MIT License 5 votes vote down vote up
def fit(self, X, y, *args):
        """Select features by the magnitude of L1-regularized SGD coefficients.

        Fits on log1p-transformed targets and keeps the column indices whose
        absolute coefficient reaches the configured percentile cutoff.
        """
        selector = SGDRegressor(penalty='l1', loss='squared_loss',
                                alpha=3.0e-11, power_t=-0.12, eta0=0.019,
                                random_state=0, average=True)
        selector.fit(X, np.log1p(y))
        magnitudes = np.abs(selector.coef_)
        threshold = np.percentile(magnitudes, self.percentile_cutoff)
        self.features_to_keep = np.where(magnitudes >= threshold)[0]
        return self 
Example 20
Project: lale   Author: IBM   File: test_core_operators.py    License: Apache License 2.0 5 votes vote down vote up
def test_sgd_regressor(self):
        """Wrapper accepts squared_loss with a custom epsilon."""
        from lale.lib.sklearn import SGDRegressor

        model = SGDRegressor(loss='squared_loss', epsilon=0.2)
        model.fit(self.X_train, self.y_train) 
Example 21
Project: lale   Author: IBM   File: test_core_operators.py    License: Apache License 2.0 5 votes vote down vote up
def test_sgd_regressor_1(self):
        """Wrapper accepts the optimal learning-rate schedule with eta0."""
        from lale.lib.sklearn import SGDRegressor

        model = SGDRegressor(learning_rate='optimal', eta0=0.2)
        model.fit(self.X_train, self.y_train) 
Example 22
Project: lale   Author: IBM   File: test_core_operators.py    License: Apache License 2.0 5 votes vote down vote up
def test_sgd_regressor_2(self):
        """Wrapper accepts validation_fraction with early stopping disabled."""
        from lale.lib.sklearn import SGDRegressor

        model = SGDRegressor(early_stopping=False, validation_fraction=0.2)
        model.fit(self.X_train, self.y_train) 
Example 23
Project: lale   Author: IBM   File: test_core_operators.py    License: Apache License 2.0 5 votes vote down vote up
def test_sgd_regressor_3(self):
        """Plain sklearn SGDRegressor accepts l1 penalty with l1_ratio."""
        from sklearn.linear_model import SGDRegressor

        model = SGDRegressor(l1_ratio=0.2, penalty='l1')
        model.fit(self.X_train, self.y_train) 
Example 24
Project: pycobra   Author: bhargavvader   File: cobra.py    License: MIT License 5 votes vote down vote up
def load_default(self, machine_list='basic'):
        """
        Loads 4 different scikit-learn regressors by default. The advanced list adds more machines. 

        Parameters
        ----------
        machine_list: optional, list of strings
            List of default machine names to be loaded.
        Returns
        -------
        self : returns an instance of self.
        """

        if machine_list == 'basic':
            machine_list = ['tree', 'ridge', 'random_forest', 'svm']
        if machine_list == 'advanced':
            machine_list = ['lasso', 'tree', 'ridge', 'random_forest', 'svm', 'bayesian_ridge', 'sgd']

        # Dispatch table replaces the original if-chain; names not present
        # here are silently skipped, exactly as before.
        builders = {
            'lasso': lambda: linear_model.LassoCV(random_state=self.random_state),
            'tree': lambda: DecisionTreeRegressor(random_state=self.random_state),
            'ridge': lambda: linear_model.RidgeCV(),
            'random_forest': lambda: RandomForestRegressor(random_state=self.random_state),
            'svm': lambda: LinearSVR(random_state=self.random_state),
            'sgd': lambda: linear_model.SGDRegressor(random_state=self.random_state),
            'bayesian_ridge': lambda: linear_model.BayesianRidge(),
        }

        self.estimators_ = {}
        for machine in machine_list:
            if machine not in builders:
                continue
            try:
                # Construction and fitting both stay inside the try, so a
                # ValueError from either is skipped as in the original.
                self.estimators_[machine] = builders[machine]().fit(self.X_k_, self.y_k_)
            except ValueError:
                continue
        return self 
Example 25
Project: pycobra   Author: bhargavvader   File: kernelcobra.py    License: MIT License 5 votes vote down vote up
def load_default(self, machine_list='basic'):
        """
        Loads 4 different scikit-learn regressors by default. The advanced list adds more machines. 
        Parameters
        ----------
        machine_list: optional, list of strings
            List of default machine names to be loaded. 
            Default is basic,
        Returns
        -------
        self : returns an instance of self.
        """
        if machine_list == 'basic':
            machine_list = ['tree', 'ridge', 'random_forest', 'svm']
        if machine_list == 'advanced':
            machine_list = ['lasso', 'tree', 'ridge', 'random_forest', 'svm', 'bayesian_ridge', 'sgd']

        # Dispatch table replaces the original if-chain; names not present
        # here are silently skipped, exactly as before.
        builders = {
            'lasso': lambda: linear_model.LassoCV(random_state=self.random_state),
            'tree': lambda: DecisionTreeRegressor(random_state=self.random_state),
            'ridge': lambda: linear_model.RidgeCV(),
            'random_forest': lambda: RandomForestRegressor(random_state=self.random_state),
            'svm': lambda: SVR(),
            'sgd': lambda: linear_model.SGDRegressor(random_state=self.random_state),
            'bayesian_ridge': lambda: linear_model.BayesianRidge(),
        }

        self.estimators_ = {}
        for machine in machine_list:
            if machine not in builders:
                continue
            try:
                # Construction and fitting both stay inside the try, so a
                # ValueError from either is skipped as in the original.
                self.estimators_[machine] = builders[machine]().fit(self.X_k_, self.y_k_)
            except ValueError:
                continue
        return self 
Example 26
Project: Splunking-Crime   Author: nccgroup   File: SGDRegressor.py    License: GNU Affero General Public License v3.0 5 votes vote down vote up
def __init__(self, options):
        """Validate options, then build the scaler and SGD estimator.

        `options['params']` is converted to typed keyword arguments before
        being forwarded to the underlying SGDRegressor.
        """
        self.handle_options(options)

        params = convert_params(
            options.get('params', {}),
            bools=['fit_intercept'],
            ints=['random_state', 'n_iter'],
            floats=['l1_ratio', 'alpha', 'eta0', 'power_t'],
            strs=['penalty', 'learning_rate'],
        )

        self.columns = None
        self.scaler = StandardScaler()
        self.estimator = _SGDRegressor(**params) 
Example 27
Project: oddt   Author: oddt   File: PLECscore.py    License: BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def gen_json(self, home_dir=None, pdbbind_version=2016):
        """Serialize the fitted model's learned attributes to a JSON file.

        Parameters
        ----------
        home_dir : str, optional
            Output directory; defaults to a 'PLECscore' folder next to this file.
        pdbbind_version : int
            PDBbind release used, embedded in the output filename.

        Returns
        -------
        str : path of the written JSON file.

        Raises
        ------
        ValueError : if the model is neither SGDRegressor nor MLPRegressor.
        """
        if not home_dir:
            home_dir = path_join(dirname(__file__), 'PLECscore')

        if isinstance(self.model, SGDRegressor):
            attributes = ['coef_', 'intercept_', 't_']
        elif isinstance(self.model, MLPRegressor):
            attributes = ['loss_', 'coefs_', 'intercepts_', 'n_iter_',
                          'n_layers_', 'n_outputs_', 'out_activation_']
        else:
            # Previously `attributes` was left unbound here, producing a
            # confusing NameError; fail explicitly instead.
            raise ValueError('Unsupported model type: %s'
                             % type(self.model).__name__)

        out = {}
        for attr_name in attributes:
            attr = getattr(self.model, attr_name)
            # convert numpy arrays to list for json
            if isinstance(attr, np.ndarray):
                attr = attr.tolist()
            elif (isinstance(attr, (list, tuple)) and
                  isinstance(attr[0], np.ndarray)):
                attr = [x.tolist() for x in attr]
            out[attr_name] = attr

        json_path = path_join(home_dir, 'plecscore_%s_p%i_l%i_s%i_pdbbind%i.json' %
                              (self.version, self.depth_protein,
                               self.depth_ligand, self.size, pdbbind_version))

        with open(json_path, 'w') as json_f:
            json.dump(out, json_f, indent=2)
        return json_path 
Example 28
Project: sklearn2pmml   Author: jpmml   File: __init__.py    License: GNU Affero General Public License v3.0 5 votes vote down vote up
def test_lm(self):
		"""Check the PMML round-trip for several linear models."""
		for estimator in [ElasticNet(), LinearRegression(), SGDRegressor()]:
			_checkLM(estimator) 
Example 29
Project: AirBnbPricePrediction   Author: PouyaREZ   File: run_models.py    License: MIT License 5 votes vote down vote up
def linear_model_SGD(X_train, y_train, X_val, y_val):
    """Train an SGD regressor and report metrics on validation and train data."""
    sgd = SGDRegressor()
    sgd.fit(X_train, y_train)
    print_evaluation_metrics(sgd, "sgd", X_val, y_val.values.ravel())
    print_evaluation_metrics2(sgd, "sgd", X_train, y_train.values.ravel()) 
Example 30
Project: aca   Author: geekinglcq   File: ILearner.py    License: MIT License 5 votes vote down vote up
def __init__(self):
        # NOTE(review): `n_iter` was deprecated in scikit-learn 0.19 and
        # removed in 0.21 (renamed `max_iter`) — this only runs on an older
        # sklearn; confirm the project's version pin.
        self.clf = linear_model.SGDRegressor(n_iter=50) 