Python xgboost.sklearn.XGBClassifier() Examples

The following are 6 code examples of xgboost.sklearn.XGBClassifier(). You can go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module xgboost.sklearn, or try the search function.
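Before the project excerpts below, here is a minimal, self-contained sketch of the scikit-learn style interface they all rely on; the dataset and parameter values are illustrative only and do not come from any of the listed projects.

# Minimal sketch: XGBClassifier exposes the usual scikit-learn estimator API
# (fit / predict / predict_proba). All values below are illustrative.
from xgboost.sklearn import XGBClassifier
from sklearn.datasets import make_classification
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score

X, y = make_classification(n_samples=500, n_features=20, random_state=0)
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)

clf = XGBClassifier(n_estimators=100, max_depth=3, learning_rate=0.1)
clf.fit(X_train, y_train)
print(accuracy_score(y_test, clf.predict(X_test)))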
Example #1
Source File: classifier.py    From Video-Highlight-Detection with MIT License (7 votes)
def _build_model(self, model_name, params=None):
        # Choose the SVM kernel: use a chi-squared kernel unless a linear or
        # rbf kernel was explicitly configured on the instance.
        kernel_function = chi2_kernel if self.model_kernel not in ('linear', 'rbf') else self.model_kernel
        if params is None:
            if model_name == 'xgb':
                self.model = XGBClassifier(n_estimators=100, learning_rate=0.02)
            elif model_name == 'svm':
                self.model = SVC(C=1, kernel=kernel_function, gamma=1, probability=True)
            elif model_name == 'lr':
                self.model = LR(C=1, penalty='l1', tol=1e-6)
        else:
            if model_name == 'xgb':
                self.model = XGBClassifier(n_estimators=1000, learning_rate=0.02, **params)
            elif model_name == 'svm':
                self.model = SVC(C=1, kernel=kernel_function, gamma=1, probability=True)
            elif model_name == 'lr':
                self.model = LR(C=1, penalty='l1', tol=1e-6)

        log.l.info('=======> built the model {} done'.format(self.model_name)) 
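The same name-based dispatch can be written as a stand-alone function; the sketch below is illustrative only (the function name, the defaults, and the plain 'rbf' kernel are assumptions, not code from Video-Highlight-Detection).

# Illustrative stand-alone version of the name-to-estimator dispatch above.
from xgboost.sklearn import XGBClassifier
from sklearn.svm import SVC
from sklearn.linear_model import LogisticRegression as LR

def build_model(model_name, params=None):
    params = params or {}
    if model_name == 'xgb':
        return XGBClassifier(n_estimators=100, learning_rate=0.02, **params)
    if model_name == 'svm':
        return SVC(C=1, kernel='rbf', gamma=1, probability=True, **params)
    if model_name == 'lr':
        # the l1 penalty requires the liblinear solver in recent scikit-learn
        return LR(C=1, penalty='l1', solver='liblinear', tol=1e-6, **params)
    raise ValueError('unknown model name: {}'.format(model_name))

model = build_model('xgb')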
Example #2
Source File: tune_xgboost.py    From jh-kaggle-util with Apache License 2.0 (5 votes)
def modelfit(params, x, y):
    # Fit the algorithm on the data
    print("fit")
    alg = XGBClassifier(**params)
    alg.fit(x, y, verbose=True)
    # Rank features by split count; booster() was renamed get_booster() in later XGBoost releases
    feat_imp = pd.Series(alg.booster().get_fscore()).sort_values(ascending=False)
    print(feat_imp) 
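Since booster() only exists in old XGBoost releases, a hedged equivalent of the feature-importance step with the current API (get_booster(), or the feature_importances_ attribute) could look like this, using made-up data:

# Feature-importance dump with the newer XGBoost API; data and values are made up.
import pandas as pd
from xgboost.sklearn import XGBClassifier
from sklearn.datasets import make_classification

X, y = make_classification(n_samples=300, n_features=10, random_state=0)
alg = XGBClassifier(n_estimators=50)
alg.fit(X, y)

feat_imp = pd.Series(alg.get_booster().get_fscore()).sort_values(ascending=False)
print(feat_imp)                   # split counts keyed by feature name (f0, f1, ...)
print(alg.feature_importances_)   # normalized importances as a plain array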
Example #3
Source File: trainclasses.py    From predictatops with MIT License (5 votes)
def init_XGBoost_withSettings(self):
        """
        Takes in 
        Returns
        """

        ##########################     Initial Machine Learning Using XGBoost classification   ##########################
        ##########################     Optional
        model = XGBClassifier(
            max_depth=3,
            objective="multi:softmax",  # error evaluation for multiclass training
            num_class=5,
            n_gpus=0,
            n_jobs=-1
            # gamma=gamma,
            # reg_alpha=reg_alpha,
            # max_depth=max_depth,
            # subsample=subsample,
            # colsample_bytree= colsample_bytree,
            # n_estimators= n_estimators,
            # learning_rate= learning_rate,
            # min_child_weight= min_child_weight,
            # n_jobs=n_jobs
            # params
        )
        print(
            " init_XGBoost_withSettings function has been called which initiates an XGBoost classifier with settings of: max_depth=3, objective='multi:softmax', num_class=5, n_gpus=0, n_jobs=-1"
        )
        print("model coming out of init_XGBoost_withSettings() function is:", model)
        return model 
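A quick sketch of fitting a classifier configured as above on five-class data; the synthetic dataset is an assumption for illustration and is not part of predictatops. Note that the scikit-learn wrapper infers num_class from y, so it is omitted here.

# Illustrative fit of a multi:softmax classifier on synthetic 5-class data.
from xgboost.sklearn import XGBClassifier
from sklearn.datasets import make_classification

X, y = make_classification(n_samples=500, n_features=8, n_informative=6,
                           n_classes=5, random_state=0)
model = XGBClassifier(max_depth=3, objective="multi:softmax", n_jobs=-1)
model.fit(X, y)
print(model.predict(X[:5]))   # predicted class labels 0..4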
Example #4
Source File: ml_workflow.py    From Crypto_trading_robot with MIT License (5 votes)
def gridsearch_run(X_train, y_train):

    # Default classifier which will be tuned
    xgb_model = XGBClassifier(
        n_estimators=100,
        max_depth=8,
        min_child_weight=1,
        gamma=0,
        subsample=0.5,
        colsample_bytree=0.5,
        learning_rate=0.1, # ok for Gridsearch
        objective='multi:softprob',
        silent=True,
        nthread=1,
        num_class=3
        )

    # A parameter grid for XGBoost
    params = set_gridsearch_params()

    clf = GridSearchCV(xgb_model,
        params,
        cv=list(KFold(n_splits=5, shuffle=True).split(X_train)), # at least 5 splits
        verbose=2,
        scoring='neg_log_loss',
        n_jobs=-1
        )

    grid_result = clf.fit(X_train, y_train.values.ravel())

    print("\n\nBest score: %f using %s" % (grid_result.best_score_, grid_result.best_params_))
    means = grid_result.cv_results_['mean_test_score']
    stds = grid_result.cv_results_['std_test_score']
    params = grid_result.cv_results_['params']
    print("\nStats:")
    for mean, stdev, param in zip(means, stds, params):
        print("%f (%f) with: %r" % (mean, stdev, param))

### Train - test and save 
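set_gridsearch_params() is not shown in this excerpt; a purely hypothetical grid that would plug into the GridSearchCV call above might look like this (the real function in Crypto_trading_robot may differ).

# Hypothetical parameter grid for the GridSearchCV call above.
def set_gridsearch_params():
    return {
        'max_depth': [4, 6, 8],
        'min_child_weight': [1, 5],
        'subsample': [0.5, 0.8],
        'colsample_bytree': [0.5, 0.8],
    }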
Example #5
Source File: ABuMLCreater.py    From abu with GNU General Public License v3.0 (5 votes)
def xgb_classifier(self, assign=True, **kwargs):
        """
        有监督学习分类器,默认使用:
                        GBC(n_estimators=100)

        通过**kwargs即关键字参数透传GBC(**kwargs),即:
                        GBC(**kwargs)

        注意导入使用:
            try:
                from xgboost.sklearn import XGBClassifier as GBC
            except ImportError:
                from sklearn.ensemble import GradientBoostingClassifier as GBC


        :param assign: 是否保存实例后的分类器对象,默认True,self.clf = clf
        :param kwargs: 有参数情况下初始化: GBC(n_estimators=100)
                       无参数情况下初始化: GBC(**kwargs)

        :return: 实例化的GBC对象
        """
        if kwargs is not None and len(kwargs) > 0:
            clf = GBC(**kwargs)
        else:
            clf = GBC(n_estimators=100)
        if assign:
            self.clf = clf
        return clf 
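A self-contained sketch of the import fallback described in the docstring; the usage line is illustrative.

# Fall back to scikit-learn's gradient boosting when xgboost is not installed.
try:
    from xgboost.sklearn import XGBClassifier as GBC
except ImportError:
    from sklearn.ensemble import GradientBoostingClassifier as GBC

clf = GBC(n_estimators=100)   # the same call works with either backend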
Example #6
Source File: xgb_tune.py    From ml-parameter-optimization with MIT License (4 votes)
def tune_params(self):
        """
        tune specified (and default) parameters
        """
        self._start_time = time.time()
        self.default_params() # set default parameters
        self.score_init() # set initial score
        iround = 0
        while iround<self.max_rounds:
            print('\nLearning rate for iteration %i: %f.' %(iround+1,self._params['learning_rate']))
            while self._step<5:
                istep_time = time.time()
                if self._step==0:
                    xgb = XGBClassifier(**self._params)
                    self.get_n_estimators(xgb)
                else:
                    self.apply_gridsearch(XGBClassifier(**self._params))
                self.print_progress(istep_time,iround=iround,max_rounds=self.max_rounds) # print params and performance
                self._step+=1
            
            # store model each iteration
            self._params_iround[iround] = {}
            for key,value in self._params.items():
                self._params_iround[iround][key] = value
            self._params_iround[iround]['model_score'] = self._temp_score
            
            # check if max_runtime is breached
            if (time.time() - self._start_time) > self.max_runtime:
                print('Tuning stopped after iteration %i. Max runtime of %i sec exceeded.'
                            %(iround+1,self.max_runtime))
                return
            
            # early stopping criterion
            if (iround>=self.running_rounds and 
                    self.best_score==self._params_iround[max(0,iround-self.running_rounds)]['model_score']):
                print('Tuning stopped after iteration %i. No model improvement for %i consecutive rounds.'
                            %(iround+1,self.running_rounds))
                return
            
            # update learning rate and reset n_estimators for next iteration
            if iround<self.max_rounds-1:
                self.update_learning_rate()
            
            if self._stop_learning:
                print('Tuning stopped after iteration %i. Minimum learning rate %f reached.'
                            %(iround+1,self._min_learning_rate))
                return
            
            self._step=0
            iround+=1
        
        return
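Stripped of the bookkeeping, the staged idea above (fix a learning rate, grid-search a few parameters, then shrink the rate and repeat) can be sketched as follows; every name and value here is an assumption for illustration, not code from ml-parameter-optimization.

# Minimal illustrative version of the staged tuning loop above.
from xgboost.sklearn import XGBClassifier
from sklearn.datasets import make_classification
from sklearn.model_selection import GridSearchCV

X, y = make_classification(n_samples=400, n_features=12, random_state=0)
params = {'n_estimators': 100}
for learning_rate in (0.1, 0.05, 0.02):          # shrink the rate each round
    params['learning_rate'] = learning_rate
    search = GridSearchCV(XGBClassifier(**params),
                          {'max_depth': [3, 5], 'min_child_weight': [1, 5]},
                          scoring='neg_log_loss', cv=3)
    search.fit(X, y)
    params.update(search.best_params_)           # carry the best settings forward
    print(learning_rate, search.best_score_)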