Python hyperopt.hp.quniform() Examples

The following are 27 code examples of hyperopt.hp.quniform(), extracted from open source projects. The originating project and source file are noted above each example. You may also want to check out all available functions/classes of the module hyperopt.hp, or try the search function.
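As background, hp.quniform(label, low, high, q) draws from a uniform distribution over [low, high] and rounds the result to a multiple of q, which makes it useful for discrete parameters with even spacing. A minimal sketch of its use with fmin (the toy objective and ranges here are illustrative only):

from hyperopt import fmin, hp, tpe

# Toy objective: find the grid point in {0.0, 0.5, ..., 10.0} closest to 4.
space = hp.quniform("x", 0, 10, 0.5)
best = fmin(fn=lambda x: (x - 4) ** 2, space=space,
            algo=tpe.suggest, max_evals=50)
print(best)  # e.g. {'x': 4.0}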
Example #1
Source File: automl.py    From kddcup2019-automl with MIT License
def hyperopt_lightgbm(X: pd.DataFrame, y: pd.Series, params: Dict, config: Config):
    X_train, X_val, y_train, y_val = data_split(X, y, test_size=0.5)
    train_data = lgb.Dataset(X_train, label=y_train)
    valid_data = lgb.Dataset(X_val, label=y_val)

    space = {
        "max_depth": hp.choice("max_depth", np.arange(2, 10, 1, dtype=int)),
        # smaller than 2^(max_depth)
        "num_leaves": hp.choice("num_leaves", np.arange(4, 200, 4, dtype=int)),
        "feature_fraction": hp.quniform("feature_fraction", 0.2, 0.8, 0.1),
        # "bagging_fraction": hp.quniform("bagging_fraction", 0.2, 0.8, 0.1),
        # "bagging_freq": hp.choice("bagging_freq", np.linspace(0, 10, 2, dtype=int)),
        # "scale_pos_weight":hp.uniform('scale_pos_weight',1.0, 10.0),
        # "colsample_by_tree":hp.uniform("colsample_bytree",0.5,1.0),
        "min_child_weight": hp.quniform('min_child_weight', 2, 50, 2),
        "reg_alpha": hp.uniform("reg_alpha", 2.0, 8.0),
        "reg_lambda": hp.uniform("reg_lambda", 2.0, 8.0),
        "learning_rate": hp.quniform("learning_rate", 0.05, 0.4, 0.01),
        # "learning_rate": hp.loguniform("learning_rate", np.log(0.04), np.log(0.5)),
        #
        "min_data_in_leaf": hp.choice('min_data_in_leaf', np.arange(200, 2000, 100, dtype=int)),
        #"is_unbalance": hp.choice("is_unbalance", [True])
    }

    def objective(hyperparams):
        model = lgb.train({**params, **hyperparams}, train_data, 300,
                          valid_data, early_stopping_rounds=45, verbose_eval=0)

        score = model.best_score["valid_0"][params["metric"]]

        # hyperopt minimizes the loss, so negate the score (higher is better)
        return {'loss': -score, 'status': STATUS_OK}

    trials = Trials()
    best = hyperopt.fmin(fn=objective, space=space, trials=trials,
                         algo=tpe.suggest, max_evals=150, verbose=1,
                         rstate=np.random.RandomState(1))

    hyperparams = space_eval(space, best)
    log(f"auc = {-trials.best_trial['result']['loss']:0.4f} {hyperparams}")
    return hyperparams 
Example #2
Source File: doc2vec.py    From asreview with Apache License 2.0
def full_hyper_space(self):
        from hyperopt import hp
        eps = 1e-7

        hyper_space, hyper_choices = super(Doc2Vec, self).full_hyper_space()
        hyper_space.update({
            "fex_vector_size": hp.quniform(
                "fex_vector_size", 31.5, 127.5-eps, 8),
            "fex_epochs": hp.quniform("fex_epochs", 20, 50, 1),
            "fex_min_count": hp.quniform("fex_min_count", 0.5, 2.499999, 1),
            "fex_window": hp.quniform("fex_window", 4.5, 9.4999999, 1),
            "fex_dm_concat": hp.randint("fex_dm_concat", 2),
            "fex_dm": hp.randint("fex_dm", 3),
            "fex_dbow_words": hp.randint("fex_dbow_words", 2),
        })

        return hyper_space, hyper_choices 
Example #3
Source File: average.py    From fnc-1 with Apache License 2.0
def run():

    param_space = {

            'w0': 1.0,
            'w1': hp.quniform('w1', 0.01, 2.0, 0.01),
            'max_evals': 800
            }
    
    
    trial_counter = 0
    trials = Trials()
    objective = lambda p: hyperopt_wrapper(p)
    best_params = fmin(objective, param_space, algo=tpe.suggest,\
        trials = trials, max_evals=param_space["max_evals"])
    
    print('best parameters: ')
    for k, v in best_params.items():
        print("%s: %s" % (k, v))

    trial_loss = np.asarray(trials.losses(), dtype=float)
    best_loss = min(trial_loss)
    print('best loss: ', best_loss)
Example #4
Source File: test_pyll_util.py    From HPOlib with GNU General Public License v3.0
def test_read_quniform(self):
        # TODO scope.int(hp.quniform('liblinear:LOG2_C', -5, 15, 1))
        # 0 float
        # 1   hyperopt_param
        # 2     Literal{l0eg_fsize}
        # 3     quniform
        # 4       Literal{2.50001}
        # 5       Literal{8.5}
        # 6       Literal{1}
        quniform = hp.quniform('l0eg_fsize', 2.50001, 8.5, 1). \
            inputs()[0].inputs()[1]
        ret = self.pyll_reader.read_quniform(quniform, 'l0eg_fsize')
        expected = configuration_space.UniformIntegerHyperparameter(
            'l0eg_fsize', 3, 8)
        self.assertEqual(expected, ret)

        l2_out_lp_psize = hp.quniform("l2_out_lp_psize", 0.50001, 5.5, 1). \
            inputs()[0].inputs()[1]
        ret = self.pyll_reader.read_quniform(l2_out_lp_psize, "l2_out_lp_psize")
        expected = configuration_space.UniformIntegerHyperparameter(
            "l2_out_lp_psize", 1, 5)
        self.assertEqual(expected, ret) 
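The offset bounds in this test follow from how quniform rounds: it evaluates to round(uniform(low, high) / q) * q, so widening each bound by roughly q/2 (here 2.50001 and 8.5 for q=1) makes every integer in [3, 8] reachable with equal probability. The writer tests below (Examples #6, #15 and #17) apply the same q/2 adjustment when emitting bounds such as -0.0499 and 3.05. A minimal sketch of the rounding behavior, assuming only hyperopt is installed:

from hyperopt import hp
from hyperopt.pyll.stochastic import sample

# Bounds of 2.50001 and 8.5 with q=1 make each integer in [3, 8] equally likely.
dist = hp.quniform("l0eg_fsize", 2.50001, 8.5, 1)
draws = {int(sample(dist)) for _ in range(1000)}
print(sorted(draws))  # expected: [3, 4, 5, 6, 7, 8]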
Example #5
Source File: main.py    From pyprophet with BSD 3-Clause "New" or "Revised" License
def score(infile, outfile, classifier, xgb_autotune, apply_weights, xeval_fraction, xeval_num_iter, ss_initial_fdr, ss_iteration_fdr, ss_num_iter, ss_main_score, group_id, parametric, pfdr, pi0_lambda, pi0_method, pi0_smooth_df, pi0_smooth_log_pi0, lfdr_truncate, lfdr_monotone, lfdr_transformation, lfdr_adj, lfdr_eps, level, ipf_max_peakgroup_rank, ipf_max_peakgroup_pep, ipf_max_transition_isotope_overlap, ipf_min_transition_sn, tric_chromprob, threads, test, ss_score_filter):
    """
    Conduct semi-supervised learning and error-rate estimation for MS1, MS2 and transition-level data. 
    """

    if outfile is None:
        outfile = infile

    # Prepare XGBoost-specific parameters
    xgb_hyperparams = {'autotune': xgb_autotune, 'autotune_num_rounds': 10, 'num_boost_round': 100, 'early_stopping_rounds': 10, 'test_size': 0.33}

    xgb_params = {'eta': 0.3, 'gamma': 0, 'max_depth': 6, 'min_child_weight': 1, 'subsample': 1, 'colsample_bytree': 1, 'colsample_bylevel': 1, 'colsample_bynode': 1, 'lambda': 1, 'alpha': 0, 'scale_pos_weight': 1, 'silent': 1, 'objective': 'binary:logitraw', 'nthread': 1, 'eval_metric': 'auc'}

    xgb_params_space = {'eta': hp.uniform('eta', 0.0, 0.3), 'gamma': hp.uniform('gamma', 0.0, 0.5), 'max_depth': hp.quniform('max_depth', 2, 8, 1), 'min_child_weight': hp.quniform('min_child_weight', 1, 5, 1), 'subsample': 1, 'colsample_bytree': 1, 'colsample_bylevel': 1, 'colsample_bynode': 1, 'lambda': hp.uniform('lambda', 0.0, 1.0), 'alpha': hp.uniform('alpha', 0.0, 1.0), 'scale_pos_weight': 1.0, 'silent': 1, 'objective': 'binary:logitraw', 'nthread': 1, 'eval_metric': 'auc'}

    if not apply_weights:
        PyProphetLearner(infile, outfile, classifier, xgb_hyperparams, xgb_params, xgb_params_space, xeval_fraction, xeval_num_iter, ss_initial_fdr, ss_iteration_fdr, ss_num_iter, ss_main_score, group_id, parametric, pfdr, pi0_lambda, pi0_method, pi0_smooth_df, pi0_smooth_log_pi0, lfdr_truncate, lfdr_monotone, lfdr_transformation, lfdr_adj, lfdr_eps, level, ipf_max_peakgroup_rank, ipf_max_peakgroup_pep, ipf_max_transition_isotope_overlap, ipf_min_transition_sn, tric_chromprob, threads, test, ss_score_filter).run()
    else:
        PyProphetWeightApplier(infile, outfile, classifier, xgb_hyperparams, xgb_params, xgb_params_space, xeval_fraction, xeval_num_iter, ss_initial_fdr, ss_iteration_fdr, ss_num_iter, ss_main_score, group_id, parametric, pfdr, pi0_lambda, pi0_method, pi0_smooth_df, pi0_smooth_log_pi0, lfdr_truncate, lfdr_monotone, lfdr_transformation, lfdr_adj, lfdr_eps, level, ipf_max_peakgroup_rank, ipf_max_peakgroup_pep, ipf_max_transition_isotope_overlap, ipf_min_transition_sn, tric_chromprob, threads, test, apply_weights, ss_score_filter).run()


# IPF 
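One caveat about the space above: hp.quniform returns floats (max_depth is sampled as, say, 6.0), while XGBoost expects an integer for max_depth. A hedged sketch of one common workaround, where prepare_params is a hypothetical helper applied to each sampled configuration before training:

# Hypothetical helper: cast quniform-sampled values that must be integers.
def prepare_params(hyperparams):
    hyperparams = dict(hyperparams)
    if "max_depth" in hyperparams:
        hyperparams["max_depth"] = int(hyperparams["max_depth"])
    return hyperparams

Alternatively, the cast can live in the space itself, as Examples #9 and #24 do with scope.int(hp.quniform(...)).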
Example #6
Source File: test_pyll_util.py    From HPOlib with GNU General Public License v3.0
def test_convert_conditional_space(self):
        a_or_b = configuration_space.CategoricalHyperparameter("a_or_b", ["a", "b"])
        cond_a = configuration_space.UniformFloatHyperparameter(
            'cond_a', 0, 1, conditions=[['a_or_b == a']])
        cond_b = configuration_space.UniformFloatHyperparameter(
            'cond_b', 0, 3, q=0.1, conditions=[['a_or_b == b']])
        conditional_space = {"a_or_b": a_or_b, "cond_a": cond_a, "cond_b": cond_b}
        cs = self.pyll_writer.write(conditional_space)
        expected = StringIO.StringIO()
        expected.write('from hyperopt import hp\nimport hyperopt.pyll as pyll')
        expected.write('\n\n')
        expected.write('param_0 = hp.uniform("cond_a", 0.0, 1.0)\n')
        expected.write('param_1 = hp.quniform("cond_b", -0.0499, 3.05, 0.1)\n')
        expected.write('param_2 = hp.choice("a_or_b", [\n')
        expected.write('    {"a_or_b": "a", "cond_a": param_0, },\n')
        expected.write('    {"a_or_b": "b", "cond_b": param_1, },\n')
        expected.write('    ])\n\n')
        expected.write('space = {"a_or_b": param_2}\n')
        self.assertEqual(expected.getvalue(), cs) 
Example #7
Source File: test_pyll_util.py    From HPOlib with GNU General Public License v3.0
def test_operator_in(self):
        a_or_b = configuration_space.CategoricalHyperparameter("a_or_b", ["a", "b"])
        cond_a = configuration_space.UniformFloatHyperparameter(
            'cond_a', 0, 1, conditions=[['a_or_b == a']])
        cond_b = configuration_space.UniformFloatHyperparameter(
            'cond_b', 0, 3, q=0.1, conditions=[['a_or_b == b']])
        e = configuration_space.UniformFloatHyperparameter("e", 0, 5,
                                     conditions=[['a_or_b in {a,b}']])
        conditional_space_operator_in = {"a_or_b": a_or_b, "cond_a": cond_a,
                                 "cond_b": cond_b, "e": e}
        cs = self.pyll_writer.write(conditional_space_operator_in)
        expected = StringIO.StringIO()
        expected.write('from hyperopt import hp\nimport hyperopt.pyll as pyll')
        expected.write('\n\n')
        expected.write('param_0 = hp.uniform("cond_a", 0.0, 1.0)\n')
        expected.write('param_1 = hp.quniform("cond_b", -0.0499, 3.05, 0.1)\n')
        expected.write('param_2 = hp.uniform("e", 0.0, 5.0)\n')
        expected.write('param_3 = hp.choice("a_or_b", [\n')
        expected.write('    {"a_or_b": "a", "cond_a": param_0, "e": param_2, '
                       '},\n')
        expected.write('    {"a_or_b": "b", "cond_b": param_1, "e": param_2, '
                       '},\n')
        expected.write('    ])\n\n')
        expected.write('space = {"a_or_b": param_3}\n')
        self.assertEqual(expected.getvalue(), cs) 
Example #8
Source File: gaussian_process.py    From PES-Learn with BSD 3-Clause "New" or "Revised" License
def set_default_hyperparameters(self):
        """
        Set default hyperparameter space. If none is provided, default is used.
        """
        self.hyperparameter_space = {
                                    'scale_X': hp.choice('scale_X', ['std', 'mm01', 'mm11', None]),
                                    'scale_y': hp.choice('scale_y', ['std', 'mm01', 'mm11', None]),
                                    }

        if self.input_obj.keywords['pes_format'] == 'interatomics':
            self.set_hyperparameter('morse_transform', hp.choice('morse_transform',[{'morse': True,'morse_alpha': hp.quniform('morse_alpha', 1, 2, 0.1)},{'morse': False}]))
        else:
            self.set_hyperparameter('morse_transform', hp.choice('morse_transform',[{'morse': False}]))
        if self.pip:
            val =  hp.choice('pip',[{'pip': True,'degree_reduction': hp.choice('degree_reduction', [True,False])}])
            self.set_hyperparameter('pip', val)
        else:
            self.set_hyperparameter('pip', hp.choice('pip', [{'pip': False}]))

        if self.input_obj.keywords['gp_ard'] == 'opt': # automatic relevance determination (independent length scales for each feature)
            self.set_hyperparameter('ARD', hp.choice('ARD', [True,False]))
         #TODO add optional space inclusions, something like: if option: self.hyperparameter_space['newoption'] = hp.choice(..) 
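The nested hp.choice above encodes a conditional parameter: morse_alpha only exists in configurations where morse is True. A small sketch of what sampling such a space produces, assuming only hyperopt:

from hyperopt import hp
from hyperopt.pyll.stochastic import sample

morse = hp.choice('morse_transform', [
    {'morse': True, 'morse_alpha': hp.quniform('morse_alpha', 1, 2, 0.1)},
    {'morse': False},
])
print(sample(morse))  # e.g. {'morse': True, 'morse_alpha': 1.3} or {'morse': False}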
Example #9
Source File: test_pyll_util.py    From HPOlib with GNU General Public License v3.0
def test_write_quniform_int(self):
        b_int_1 = configuration_space.UniformIntegerHyperparameter("b_int", 0, 3, q=1.0)
        expected = ("b_int", 'param_0 = pyll.scope.int(hp.quniform('
                    '"b_int", -0.49999, 3.5, 1.0))')
        value = self.pyll_writer.write_hyperparameter(b_int_1, None)
        self.assertEqual(expected, value)

        # TODO: trying to add the same parameter name a second time, maybe an
        #  error should be raised!
        b_int_2 = configuration_space.UniformIntegerHyperparameter("b_int", 0, 3, q=2.0)
        expected = ("b_int", 'param_1 = pyll.scope.int(hp.quniform('
                    '"b_int", -0.49999, 3.5, 2.0))')
        value = self.pyll_writer.write_hyperparameter(b_int_2, None)
        self.assertEqual(expected, value)

        b_int_3 = configuration_space.UniformIntegerHyperparameter(
            "b_int", 1, 100, base=10)
        b_int_3.name = self.pyll_writer.convert_name(b_int_3)
        # TODO: this is an example of non-uniform integer sampling!
        expected = ('LOG10_Q1_b_int', 'param_1 = hp.uniform('
                    '"LOG10_Q1_b_int", -0.301021309861, 2.00216606176)')
        value = self.pyll_writer.write_hyperparameter(b_int_3, None)
        self.assertEqual(expected, value) 
Example #10
Source File: nn_2_layer.py    From asreview with Apache License 2.0
def full_hyper_space(self):
        from hyperopt import hp
        hyper_choices = {
            "mdl_optimizer": ["sgd", "rmsprop", "adagrad", "adam", "nadam"]
        }
        hyper_space = {
            "mdl_dense_width": hp.quniform("mdl_dense_width", 2, 100, 1),
            "mdl_epochs": hp.quniform("mdl_epochs", 20, 60, 1),
            "mdl_optimizer": hp.choice("mdl_optimizer",
                                       hyper_choices["mdl_optimizer"]),
            "mdl_learn_rate": hp.lognormal("mdl_learn_rate", 0, 1),
            "mdl_class_weight": hp.lognormal("mdl_class_weight", 3, 1),
            "mdl_regularization": hp.lognormal("mdl_regularization", -4, 2),
        }
        return hyper_space, hyper_choices 
Example #11
Source File: lstm_pool.py    From asreview with Apache License 2.0
def full_hyper_space(self):
        from hyperopt import hp

        hyper_choices = {}
        hyper_space = {
            "mdl_dropout": hp.uniform("mdl_dropout", 0, 0.9),
            "mdl_lstm_out_width": hp.quniform("mdl_lstm_out_width", 1, 50, 1),
            "mdl_dense_width": hp.quniform("mdl_dense_width", 1, 200, 1),
            "mdl_learn_rate_mult": hp.lognormal("mdl_learn_rate_mult", 0, 1)
        }
        return hyper_space, hyper_choices 
Example #12
Source File: rf.py    From asreview with Apache License 2.0
def full_hyper_space(self):
        from hyperopt import hp
        hyper_choices = {}
        hyper_space = {
            "mdl_n_estimators": hp.quniform("mdl_n_estimators", 10, 100, 1),
            "mdl_max_features": hp.quniform("mdl_max_features", 6, 10, 1),
            "mdl_class_weight": hp.lognormal('mdl_class_weight', 0, 1),
        }
        return hyper_space, hyper_choices 
Example #13
Source File: cluster.py    From asreview with Apache License 2.0
def full_hyper_space(self):
        from hyperopt import hp
        parameter_space = {
            "qry_cluster_size": hp.quniform('qry_cluster_size', 50, 1000, 1),
            "qry_update_interval": hp.quniform(
                'qry_update_interval', 100, 300, 1),
        }
        return parameter_space, {} 
Example #14
Source File: neural_network.py    From PES-Learn with BSD 3-Clause "New" or "Revised" License
def set_default_hyperparameters(self, nn_search_space=1):
        """
        Set default hyperparameter space. If none is provided, default is used.

        Parameters
        ----------
        nn_search_space : int
            Which tier of default hyperparameter search spaces to use. Neural networks have too many hyperparameter configurations to search across, 
            so this option reduces the number of variable hyperparameters to search over. Generally, larger integer => more hyperparameters, and more iterations of hp_maxit are recommended.
        """
        if nn_search_space == 1:
            self.hyperparameter_space = {
            'scale_X': hp.choice('scale_X',
                     [
                     {'scale_X': 'mm11',
                          'activation': hp.choice('activ2', ['tanh'])},
                     {'scale_X': 'std',
                          'activation': hp.choice('activ3', ['tanh'])},
                     ]),
            'scale_y': hp.choice('scale_y', ['std', 'mm01', 'mm11']),}
        # TODO make more expansive search spaces, benchmark them, expose them as input options
        #elif nn_search_space == 2:
        #elif nn_search_space == 3:
        else:
            raise Exception("Invalid search space specification")

        # Standard geometry transformations, always use these.
        if self.input_obj.keywords['pes_format'] == 'interatomics':
            self.set_hyperparameter('morse_transform', hp.choice('morse_transform',[{'morse': True,'morse_alpha': hp.quniform('morse_alpha', 1, 2, 0.1)},{'morse': False}]))
        else:
            self.set_hyperparameter('morse_transform', hp.choice('morse_transform',[{'morse': False}]))
        if self.pip:
            val =  hp.choice('pip',[{'pip': True,'degree_reduction': hp.choice('degree_reduction', [True,False])}])
            self.set_hyperparameter('pip', val)
        else:
            self.set_hyperparameter('pip', hp.choice('pip', [{'pip': False}])) 
Example #15
Source File: test_pyll_util.py    From HPOlib with GNU General Public License v3.0
def test_write_quniform(self):
        b = configuration_space.UniformFloatHyperparameter("b", 0, 3, q=0.1)
        expected = ("b", 'param_0 = hp.quniform("b", -0.0499, 3.05, 0.1)')
        value = self.pyll_writer.write_hyperparameter(b, None)
        self.assertEqual(expected, value)

        b = configuration_space.UniformFloatHyperparameter(
            "b", 0.1, 3, q=0.1, base=10)
        b.name = self.pyll_writer.convert_name(b)
        expected = ('LOG10_Q0.100000_b', 'param_1 = hp.uniform('
                    '"LOG10_Q0.100000_b", -1.30016227413, 0.484299839347)')
        value = self.pyll_writer.write_hyperparameter(b, None)
        self.assertEqual(expected, value) 
Example #16
Source File: test_pyll_util.py    From HPOlib with GNU General Public License v3.0
def test_convert_complex_space(self):
        cs = self.pyll_writer.write(config_space)
        expected = StringIO.StringIO()
        expected.write('from hyperopt import hp\nimport hyperopt.pyll as pyll')
        expected.write('\n\n')
        expected.write('param_0 = hp.uniform("LOG2_C", -5.0, 15.0)\n')
        expected.write('param_1 = hp.uniform("LOG2_gamma", -14.9999800563, '
                       '3.0)\n')
        expected.write('param_2 = hp.choice("kernel", [\n')
        expected.write('    {"kernel": "linear", },\n')
        expected.write('    {"kernel": "rbf", "LOG2_gamma": param_1, },\n')
        expected.write('    ])\n')
        expected.write('param_3 = hp.uniform("lr", 0.0001, 1.0)\n')
        expected.write('param_4 = pyll.scope.int(hp.quniform('
                       '"neurons", 15.50001, 1024.5, 16.0))\n')
        expected.write('param_5 = hp.choice("classifier", [\n')
        expected.write('    {"classifier": "nn", "lr": param_3, "neurons": '
                       'param_4, },\n')
        expected.write('    {"classifier": "svm", "LOG2_C": param_0, '
                       '"kernel": param_2, },\n')
        expected.write('    ])\n')
        expected.write('param_6 = hp.choice("preprocessing", [\n')
        expected.write('    {"preprocessing": "None", },\n')
        expected.write('    {"preprocessing": "pca", },\n')
        expected.write('    ])\n\n')
        expected.write('space = {"classifier": param_5, '
                       '"preprocessing": param_6}\n')
        self.assertEqual(expected.getvalue(), cs)

        self.pyll_writer.reset_hyperparameter_countr()
        expected.seek(0)
        cs = self.pyll_writer.write(config_space_2)
        self.assertEqual(expected.getvalue().replace("gamma", "gamma_2"), cs) 
Example #17
Source File: test_pyll_util.py    From HPOlib with GNU General Public License v3.0
def test_convert_configuration_space(self):
        a = configuration_space.UniformFloatHyperparameter("a", 0, 1)
        b = configuration_space.UniformFloatHyperparameter("b", 0, 3, q=0.1)

        expected = StringIO.StringIO()
        expected.write('from hyperopt import hp\nimport hyperopt.pyll as pyll')
        expected.write('\n\n')
        expected.write('param_0 = hp.uniform("a", 0.0, 1.0)\n')
        expected.write('param_1 = hp.quniform("b", -0.0499, 3.05, 0.1)\n\n')
        expected.write('space = {"a": param_0, "b": param_1}\n')
        simple_space = {"a": a, "b": b}
        cs = self.pyll_writer.write(simple_space)
        self.assertEqual(expected.getvalue(), cs) 
Example #18
Source File: ch06-01-hopt.py    From kagglebook with BSD 3-Clause "New" or "Revised" License
def predict(self, x):
        data = xgb.DMatrix(x)
        pred = self.model.predict(data)
        return pred


# -----------------------------------
# Specify the parameter space to search
# -----------------------------------
# hp.choice selects one value from a list of candidates
# hp.uniform samples from a uniform distribution; its arguments are the lower and upper bounds
# hp.quniform samples from evenly spaced points within a bounded uniform distribution; its arguments are the lower bound, upper bound, and spacing
# hp.loguniform samples values whose logarithm is uniformly distributed; its arguments are the logs of the lower and upper bounds
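Putting the four expressions described above into a single space, as a minimal sketch (parameter names and ranges are illustrative only):

from hyperopt import hp
import numpy as np

param_space = {
    'booster': hp.choice('booster', ['gbtree', 'dart']),
    'subsample': hp.uniform('subsample', 0.6, 1.0),
    'max_depth': hp.quniform('max_depth', 3, 9, 1),
    'learning_rate': hp.loguniform('learning_rate', np.log(0.01), np.log(0.2)),
}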
Example #19
Source File: strategy.py    From ebisu with MIT License
def options(self):
        return {
            'variant_type': hp.quniform('variant_type', 0, len(self.variants) - 1, 1),
            'basis_len': hp.quniform('basis_len', 1, 30, 1),
            'resolution': hp.quniform('resolution', 1, 10, 1),
            'sma_len': hp.quniform('sma_len', 1, 15, 1),
            'div_threshold': hp.quniform('div_threshold', 1, 6, 0.1),
        } 
Example #20
Source File: strategy.py    From ebisu with MIT License
def options(self):
        return {
            'rcv_short_len': hp.quniform('rcv_short_len', 1, 10, 1),
            'rcv_medium_len': hp.quniform('rcv_medium_len', 5, 15, 1),
            'rcv_long_len': hp.quniform('rcv_long_len', 10, 20, 1),
        } 
Example #21
Source File: strategy.py    From ebisu with MIT License
def options(self):
        return {
            'fast_len': hp.quniform('fast_len', 1, 30, 1),
            'slow_len': hp.quniform('slow_len', 1, 30, 1),
        } 
Example #22
Source File: automl.py    From KDDCup2019_admin with MIT License
def hyperopt_lightgbm(X_train: pd.DataFrame, y_train: pd.Series, params: Dict, config: Config, max_evals=10):
    X_train, X_test, y_train, y_test = data_split_by_time(X_train, y_train, test_size=0.2)
    X_train, X_val, y_train, y_val = data_split_by_time(X_train, y_train, test_size=0.3)
    train_data = lgb.Dataset(X_train, label=y_train)
    valid_data = lgb.Dataset(X_val, label=y_val)

    space = {
        "learning_rate": hp.loguniform("learning_rate", np.log(0.01), np.log(0.5)),
        #"max_depth": hp.choice("max_depth", [-1, 2, 3, 4, 5, 6]),
        "max_depth": hp.choice("max_depth", [1, 2, 3, 4, 5, 6]),
        "num_leaves": hp.choice("num_leaves", np.linspace(10, 200, 50, dtype=int)),
        "feature_fraction": hp.quniform("feature_fraction", 0.5, 1.0, 0.1),
        "bagging_fraction": hp.quniform("bagging_fraction", 0.5, 1.0, 0.1),
        "bagging_freq": hp.choice("bagging_freq", np.linspace(0, 50, 10, dtype=int)),
        "reg_alpha": hp.uniform("reg_alpha", 0, 2),
        "reg_lambda": hp.uniform("reg_lambda", 0, 2),
        "min_child_weight": hp.uniform('min_child_weight', 0.5, 10),
    }

    def objective(hyperparams):
        if config.time_left() < 50:
            return {'status': STATUS_FAIL}
        else:
            model = lgb.train({**params, **hyperparams}, train_data, 100,
                          valid_data, early_stopping_rounds=10, verbose_eval=0)
            pred = model.predict(X_test)
            score = roc_auc_score(y_test, pred)

            #score = model.best_score["valid_0"][params["metric"]]

            # hyperopt minimizes the loss, so negate the score (higher is better)
            return {'loss': -score, 'status': STATUS_OK}

    trials = Trials()
    best = hyperopt.fmin(fn=objective, space=space, trials=trials,
                         algo=tpe.suggest, max_evals=max_evals, verbose=1,
                         rstate=np.random.RandomState(1))

    hyperparams = space_eval(space, best)
    log(f"auc = {-trials.best_trial['result']['loss']:0.4f} {hyperparams}")
    return hyperparams 
Example #23
Source File: hyperopt_optimizer.py    From bayesmark with Apache License 2.0
def get_hyperopt_dimensions(api_config):
        """Help routine to setup hyperopt search space in constructor.

        Take api_config as argument so this can be static.
        """
        # The ordering of iteration probably makes no difference, but to be
        # safe and consistent with space.py, sort the parameter names.
        param_list = sorted(api_config.keys())

        space = {}
        round_to_values = {}
        for param_name in param_list:
            param_config = api_config[param_name]

            param_type = param_config["type"]

            param_space = param_config.get("space", None)
            param_range = param_config.get("range", None)
            param_values = param_config.get("values", None)

            # Some setup for case that whitelist of values is provided:
            values_only_type = param_type in ("cat", "ordinal")
            if (param_values is not None) and (not values_only_type):
                assert param_range is None
                param_values = np.unique(param_values)
                param_range = (param_values[0], param_values[-1])
                round_to_values[param_name] = interp1d(
                    param_values, param_values, kind="nearest", fill_value="extrapolate"
                )

            if param_type == "int":
                low, high = param_range
                if param_space in ("log", "logit"):
                    space[param_name] = hp.qloguniform(param_name, np.log(low), np.log(high), 1)
                else:
                    space[param_name] = hp.quniform(param_name, low, high, 1)
            elif param_type == "bool":
                assert param_range is None
                assert param_values is None
                space[param_name] = hp.choice(param_name, (False, True))
            elif param_type in ("cat", "ordinal"):
                assert param_range is None
                space[param_name] = hp.choice(param_name, param_values)
            elif param_type == "real":
                low, high = param_range
                if param_space in ("log", "logit"):
                    space[param_name] = hp.loguniform(param_name, np.log(low), np.log(high))
                else:
                    space[param_name] = hp.uniform(param_name, low, high)
            else:
                assert False, "type %s not handled in API" % param_type

        return space, round_to_values 
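A sketch of how this helper might be driven; the api_config below is a made-up input that exercises the keys the function reads:

api_config = {
    "n_estimators": {"type": "int", "space": "linear", "range": (10, 200)},
    "learning_rate": {"type": "real", "space": "log", "range": (1e-4, 1e-1)},
    "criterion": {"type": "cat", "values": ["gini", "entropy"]},
}
space, round_to_values = get_hyperopt_dimensions(api_config)
# space maps "n_estimators" to hp.quniform, "learning_rate" to hp.loguniform,
# and "criterion" to hp.choice; round_to_values stays empty here because no
# numeric parameter supplied a whitelist of values.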
Example #24
Source File: lale_hyperopt.py    From lale with Apache License 2.0
def visitSearchSpaceNumber(self, space:SearchSpaceNumber, path:str, counter=None):
        label = self.mk_label(path, counter)

        if space.pgo is not None:
            return scope.pgo_sample(space.pgo, hp.quniform(label, 0, len(space.pgo)-1, 1))

        dist = "uniform"
        if space.distribution:
            dist = space.distribution

        if space.maximum is None:
            raise SearchSpaceError(path, f"maximum not specified for a number with distribution {dist}")
        max = space.getInclusiveMax()

        # These distributions need only a maximum
        if dist == "integer":
            if not space.discrete:
                raise SearchSpaceError(path, "integer distribution specified for a non discrete numeric type")
            return hp.randint(label, max)

        if space.minimum is None:
            raise SearchSpaceError(path, f"minimum not specified for a number with distribution {dist}")
        min = space.getInclusiveMin()

        if dist == "uniform":
            if space.discrete:
                return scope.int(hp.quniform(label, min, max, 1))
            else:
                return hp.uniform(label, min, max)
        elif dist == "loguniform":
            # for log distributions, hyperopt requires that we provide the log of the min/max
            if min <= 0:
                raise SearchSpaceError(path, f"minimum of 0 specified with a {dist} distribution.  This is not allowed; please set it (possibly using minimumForOptimizer) to be positive")
            if min > 0:
                min = math.log(min)
            if max > 0:
                max = math.log(max)
            if space.discrete:
                return scope.int(hp.qloguniform(label, min, max, 1))
            else:
                return hp.loguniform(label, min, max)

        else:
            raise SearchSpaceError(path, f"Unknown distribution type: {dist}") 
Example #25
Source File: lale_hyperopt.py    From lale with Apache License 2.0
def visitSearchSpaceNumber(self, space:SearchSpaceNumber, path:str, counter=None, useCounter=True):
        label = self.mk_label(path, counter, useCounter=useCounter)

        if space.pgo is not None:
            self.pgo_dict[label] = space.pgo
            return f"scope.pgo_sample(pgo_{label}, hp.quniform('{label}', {0}, {len(space.pgo)-1}, 1))"


        dist = "uniform"
        if space.distribution:
            dist = space.distribution

        if space.maximum is None:
            raise SearchSpaceError(path, f"maximum not specified for a number with distribution {dist}")
        max = space.getInclusiveMax()

        # These distributions need only a maximum
        if dist == "integer":
            if not space.discrete:
                raise SearchSpaceError(path, "integer distribution specified for a non discrete numeric type....")

            return f"hp.randint('{label}', {max})"

        if space.minimum is None:
            raise SearchSpaceError(path, f"minimum not specified for a number with distribution {dist}")
        min = space.getInclusiveMin()

        if dist == "uniform":
            if space.discrete:
                return f"hp.quniform('{label}', {min}, {max}, 1)"
            else:
                return f"hp.uniform('{label}', {min}, {max})"
        elif dist == "loguniform":
            # for log distributions, hyperopt requires that we provide the log of the min/max
            if min <= 0:
                    raise SearchSpaceError(path, f"minimum of 0 specified with a {dist} distribution.  This is not allowed; please set it (possibly using minimumForOptimizer) to be positive")
            if min > 0:
                min = math.log(min)
            if max > 0:
                max = math.log(max)

            if space.discrete:
                return f"hp.qloguniform('{label}', {min}, {max}, 1)"
            else:
                return f"hp.loguniform('{label}', {min}, {max})"
        else:
            raise SearchSpaceError(path, f"Unknown distribution type: {dist}") 
Example #26
Source File: topic_model_parameter_tuner.py    From yelp with GNU Lesser General Public License v2.1
def tune_parameters():

    from utils.constants import Constants

    context_name = '_context' if Constants.USE_CONTEXT else '_nocontext'

    mongo_url =\
        'mongo://localhost:1234/topicmodel_' +\
        Constants.ITEM_TYPE + context_name + '/jobs'
    trials = MongoTrials(mongo_url, exp_key='exp1')

    print('Connected to %s' % mongo_url)

    space =\
        hp.choice(Constants.USE_CONTEXT_FIELD, [
            {
                Constants.BUSINESS_TYPE_FIELD: Constants.ITEM_TYPE,
                # 'lda_alpha': hp.uniform('lda_alpha', 0, 1),
                # 'lda_beta': hp.uniform('lda_beta', 0, 2),
                Constants.CONTEXT_EXTRACTOR_EPSILON_FIELD: hp.uniform(
                    Constants.CONTEXT_EXTRACTOR_EPSILON_FIELD, 0, 0.5),
                Constants.TOPIC_MODEL_ITERATIONS_FIELD: hp.quniform(
                    Constants.TOPIC_MODEL_ITERATIONS_FIELD, 50, 500, 1),
                Constants.TOPIC_MODEL_PASSES_FIELD: hp.quniform(
                    Constants.TOPIC_MODEL_PASSES_FIELD, 1, 100, 1),
                Constants.TOPIC_MODEL_NUM_TOPICS_FIELD: hp.quniform(
                    Constants.TOPIC_MODEL_NUM_TOPICS_FIELD, 1, 1000, 1),
                # 'topic_weighting_method': hp.choice(
                #     'topic_weighting_method',
                #     ['probability', 'binary', 'all_topics']),
                Constants.USE_CONTEXT_FIELD: True
            },
        ])

    best = fmin(
        run_recommender, space=space, algo=tpe.suggest,
        max_evals=1000, trials=trials)

    print('losses', sorted(trials.losses()))
    print(
        'best', trials.best_trial['result'], trials.best_trial['misc']['vals'])
    print('num trials: %d' % len(trials.losses())) 
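Note that with MongoTrials, fmin does not evaluate the objective itself; separate worker processes must poll the same MongoDB instance. Per hyperopt's parallelization docs, workers are launched along these lines (the database name here is illustrative):

hyperopt-mongo-worker --mongo=localhost:1234/topicmodel_db --poll-interval=0.1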
Example #27
Source File: automl.py    From KDDCup2019_admin with MIT License
def hyperopt_lightgbm_basic(X, y, params, config, max_evals=50):
    X_train, X_test, y_train, y_test = data_split_by_time(X, y, test_size=0.2)
    X_train, X_val, y_train, y_val = data_split_by_time(X, y, test_size=0.3)
    train_data = lgb.Dataset(X_train, label=y_train)
    val_data = lgb.Dataset(X_val, label=y_val)

    space = {
        "learning_rate": hp.loguniform("learning_rate", np.log(0.01), np.log(0.5)),
        #"forgetting_factor": hp.loguniform("forgetting_factor", 0.01, 0.1)
        #"max_depth": hp.choice("max_depth", [-1, 2, 3, 4, 5, 6]),
        "max_depth": hp.choice("max_depth", [1, 2, 3, 4, 5, 6]),
        "num_leaves": hp.choice("num_leaves", np.linspace(10, 200, 50, dtype=int)),
        "feature_fraction": hp.quniform("feature_fraction", 0.5, 1.0, 0.1),
        "bagging_fraction": hp.quniform("bagging_fraction", 0.5, 1.0, 0.1),
        "bagging_freq": hp.choice("bagging_freq", np.linspace(0, 50, 10, dtype=int)),
        "reg_alpha": hp.uniform("reg_alpha", 0, 2),
        "reg_lambda": hp.uniform("reg_lambda", 0, 2),
        "min_child_weight": hp.uniform('min_child_weight', 0.5, 10),
    }

    def objective(hyperparams):
        model = lgb.train({**params, **hyperparams}, train_data, 100,
                        val_data, early_stopping_rounds=30, verbose_eval=0)
        pred = model.predict(X_test)
        score = roc_auc_score(y_test, pred)
        return {'loss': -score, 'status': STATUS_OK}

    trials = Trials()
    best = hyperopt.fmin(fn=objective, space=space, trials=trials,
                         algo=tpe.suggest, max_evals=max_evals, verbose=1,
                         rstate=np.random.RandomState(1))

    hyperparams = space_eval(space, best)
    log(f"auc = {-trials.best_trial['result']['loss']:0.4f} {hyperparams}")
    return hyperparams
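A closing note on the space_eval call used in Examples #1, #22 and this one: for hp.choice parameters, fmin returns the index of the chosen option rather than the option itself, and space_eval maps those indices back to actual values. A minimal illustration, assuming only hyperopt is installed:

from hyperopt import fmin, hp, space_eval, tpe

space = {"max_depth": hp.choice("max_depth", [1, 2, 3, 4, 5, 6])}
best = fmin(fn=lambda p: 0.0, space=space, algo=tpe.suggest, max_evals=5)
print(best)                     # e.g. {'max_depth': 3} -- an index
print(space_eval(space, best))  # e.g. {'max_depth': 4} -- the corresponding value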