Python hyperopt.Trials() Examples

The following are 30 code examples of hyperopt.Trials(), collected from open-source projects; the project and source file for each example are listed above its code. You may also want to check out all available functions and classes of the hyperopt module.
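All of the examples below follow the same basic pattern: create a Trials() object, hand it to fmin() together with a search space and an objective function, and inspect the Trials object afterwards. The minimal sketch below illustrates that pattern with a made-up objective and search space; the names objective and space and the numbers are purely illustrative.

from hyperopt import Trials, fmin, tpe, hp, STATUS_OK

# Toy objective: minimize a quadratic in x (illustrative only).
def objective(params):
    loss = (params["x"] - 3.0) ** 2
    return {"loss": loss, "status": STATUS_OK}

space = {"x": hp.uniform("x", -10.0, 10.0)}

trials = Trials()                                  # records every evaluation
best = fmin(fn=objective, space=space, algo=tpe.suggest,
            max_evals=50, trials=trials)

print(best)                                        # best raw parameter values
print(trials.best_trial["result"]["loss"])         # loss of the best trial
print(len(trials.trials))                          # number of recorded evaluations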
Example #1
Source File: test_hyopt.py    From kopt with MIT License
def test_compilefn_train_test_split(tmpdir):
    db_name = "test"
    exp_name = "test2"
    fn = CompileFN(db_name, exp_name,
                   data_fn=data.data,
                   model_fn=model.build_model,
                   optim_metric="acc",
                   optim_metric_mode="max",
                   # eval
                   valid_split=.5,
                   stratified=False,
                   random_state=True,
                   save_dir="/tmp/")
    hyper_params = {
        "data": {},
        "shared": {"max_features": 100, "maxlen": 20},
        "model": {"filters": hp.choice("m_filters", (2, 5)),
                  "hidden_dims": 3,
                  },
        "fit": {"epochs": 1}
    }
    fn_test(fn, hyper_params, tmp_dir=str(tmpdir))
    trials = Trials()
    best = fmin(fn, hyper_params, trials=trials, algo=tpe.suggest, max_evals=2)
    assert isinstance(best, dict) 
Example #2
Source File: automl.py    From kddcup2019-automl with MIT License
def hyperopt_lightgbm(X: pd.DataFrame, y: pd.Series, params: Dict, config: Config):
    X_train, X_val, y_train, y_val = data_split(X, y, test_size=0.5)
    train_data = lgb.Dataset(X_train, label=y_train)
    valid_data = lgb.Dataset(X_val, label=y_val)

    space = {
        "max_depth": hp.choice("max_depth", np.arange(2, 10, 1, dtype=int)),
        # smaller than 2^(max_depth)
        "num_leaves": hp.choice("num_leaves", np.arange(4, 200, 4, dtype=int)),
        "feature_fraction": hp.quniform("feature_fraction", 0.2, 0.8, 0.1),
        # "bagging_fraction": hp.quniform("bagging_fraction", 0.2, 0.8, 0.1),
        # "bagging_freq": hp.choice("bagging_freq", np.linspace(0, 10, 2, dtype=int)),
        # "scale_pos_weight":hp.uniform('scale_pos_weight',1.0, 10.0),
        # "colsample_by_tree":hp.uniform("colsample_bytree",0.5,1.0),
        "min_child_weight": hp.quniform('min_child_weight', 2, 50, 2),
        "reg_alpha": hp.uniform("reg_alpha", 2.0, 8.0),
        "reg_lambda": hp.uniform("reg_lambda", 2.0, 8.0),
        "learning_rate": hp.quniform("learning_rate", 0.05, 0.4, 0.01),
        # "learning_rate": hp.loguniform("learning_rate", np.log(0.04), np.log(0.5)),
        #
        "min_data_in_leaf": hp.choice('min_data_in_leaf', np.arange(200, 2000, 100, dtype=int)),
        #"is_unbalance": hp.choice("is_unbalance", [True])
    }

    def objective(hyperparams):
        model = lgb.train({**params, **hyperparams}, train_data, 300,
                          valid_data, early_stopping_rounds=45, verbose_eval=0)

        score = model.best_score["valid_0"][params["metric"]]

        # fmin minimizes the objective, so return the negated (higher-is-better) score
        return {'loss': -score, 'status': STATUS_OK}

    trials = Trials()
    best = hyperopt.fmin(fn=objective, space=space, trials=trials,
                         algo=tpe.suggest, max_evals=150, verbose=1,
                         rstate=np.random.RandomState(1))

    hyperparams = space_eval(space, best)
    log(f"auc = {-trials.best_trial['result']['loss']:0.4f} {hyperparams}")
    return hyperparams 
Example #3
Source File: automl.py    From Kaggler with MIT License
def optimize_hyperparam(self, X, y, test_size=.2, n_eval=100):
        X_trn, X_val, y_trn, y_val = train_test_split(X, y, test_size=test_size, shuffle=self.shuffle)

        train_data = lgb.Dataset(X_trn, label=y_trn)
        valid_data = lgb.Dataset(X_val, label=y_val)

        def objective(hyperparams):
            model = lgb.train({**self.params, **hyperparams}, train_data, self.n_est,
                              valid_data, early_stopping_rounds=self.n_stop, verbose_eval=0)

            score = model.best_score["valid_0"][self.metric] * self.loss_sign

            return {'loss': score, 'status': STATUS_OK, 'model': model}

        trials = Trials()
        best = hyperopt.fmin(fn=objective, space=self.space, trials=trials,
                             algo=tpe.suggest, max_evals=n_eval, verbose=1,
                             rstate=self.random_state)

        hyperparams = space_eval(self.space, best)
        return hyperparams, trials 
Example #4
Source File: automl.py    From Kaggler with MIT License
def optimize_hyperparam(self, X, y, test_size=.2, n_eval=100):
        X_trn, X_val, y_trn, y_val = train_test_split(X, y, test_size=test_size, shuffle=self.shuffle)

        def objective(hyperparams):
            model = XGBModel(n_estimators=self.n_est, **self.params, **hyperparams)
            model.fit(X=X_trn, y=y_trn,
                      eval_set=[(X_val, y_val)],
                      eval_metric=self.metric,
                      early_stopping_rounds=self.n_stop,
                      verbose=False)
            score = model.evals_result()['validation_0'][self.metric][model.best_iteration] * self.loss_sign

            return {'loss': score, 'status': STATUS_OK, 'model': model}

        trials = Trials()
        best = hyperopt.fmin(fn=objective, space=self.space, trials=trials,
                             algo=tpe.suggest, max_evals=n_eval, verbose=1,
                             rstate=self.random_state)

        hyperparams = space_eval(self.space, best)
        return hyperparams, trials 
Example #5
Source File: bayesian_optimizer.py    From pykg2vec with MIT License
def optimize(self):
        """Function that performs bayesian optimization"""
        trials = Trials()

        self._best_result = fmin(fn=self._get_loss, space=self.search_space, trials=trials,
                                 algo=tpe.suggest, max_evals=self.max_evals)
        
        columns = list(self.search_space.keys())   
        results = pd.DataFrame(columns=['iteration'] + columns + ['loss'])
        
        for idx, trial in enumerate(trials.trials):
            row = [idx]
            translated_eval = space_eval(self.search_space, {k: v[0] for k, v in trial['misc']['vals'].items()})
            for k in columns:
                row.append(translated_eval[k])
            row.append(trial['result']['loss'])
            results.loc[idx] = row

        path = self.config_local.path_result / self.model_name
        path.mkdir(parents=True, exist_ok=True)
        results.to_csv(str(path / "trials.csv"), index=False)
        
        self._logger.info(results)
        self._logger.info('Found golden setting:')
        self._logger.info(space_eval(self.search_space, self._best_result)) 
Example #6
Source File: average.py    From fnc-1 with Apache License 2.0
def run():

    param_space = {

            'w0': 1.0,
            'w1': hp.quniform('w1', 0.01, 2.0, 0.01),
            'max_evals': 800
            }
    
    
    trial_counter = 0
    trials = Trials()
    objective = lambda p: hyperopt_wrapper(p)
    best_params = fmin(objective, param_space, algo=tpe.suggest,\
        trials = trials, max_evals=param_space["max_evals"])
    
    print('best parameters:')
    for k, v in best_params.items():
        print("%s: %s" % (k, v))
    
    trial_loss = np.asarray(trials.losses(), dtype=float)
    best_loss = min(trial_loss)
    print('best loss:', best_loss)
Example #7
Source File: hyperopt.py    From lale with Apache License 2.0
def __init__(self, estimator=None, max_evals=50, cv=5, handle_cv_failure=False, 
                scoring='accuracy', best_score=0.0, max_opt_time=None, max_eval_time=None, 
                pgo:Optional[PGO]=None, show_progressbar=True, args_to_scorer=None,
                verbose=False):
        self.max_evals = max_evals
        if estimator is None:
            self.estimator = LogisticRegression()
        else:
            self.estimator = estimator
        self.search_space = hp.choice('meta_model', [hyperopt_search_space(self.estimator, pgo=pgo)])
        self.scoring = scoring
        self.best_score = best_score
        self.handle_cv_failure = handle_cv_failure
        self.cv = cv
        self._trials = Trials()
        self.max_opt_time = max_opt_time
        self.max_eval_time = max_eval_time
        self.show_progressbar = show_progressbar
        if args_to_scorer is not None:
            self.args_to_scorer = args_to_scorer
        else:
            self.args_to_scorer = {}
        self.verbose = verbose 
Example #8
Source File: hyperparam_search.py    From robotics-rl-srl with MIT License
def run(self):
        trials = hyperopt.Trials()
        hyperopt.fmin(fn=lambda kwargs: {'loss': self.train(kwargs), 'status': hyperopt.STATUS_OK},
                      space=self.search_space,
                      algo=hyperopt.tpe.suggest,
                      max_evals=self.num_eval,
                      trials=trials,
                      verbose=10)

        # from the trials, get the values for every parameter
        # set the number of iter to None as they are not changed in Hyperopt
        # and zip the loss
        self.history.extend(zip([(
            {name: val[0] for name, val in params["misc"]["vals"].items()}, None)
            for params in trials.trials], trials.losses()))
        return self.history[int(np.argmin([val[1] for val in self.history]))] 
Example #9
Source File: test_hyopt.py    From kopt with MIT License
def test_compilefn_cross_val(tmpdir):
    db_name = "test"
    exp_name = "test2"
    fn = CompileFN(db_name, exp_name,
                   cv_n_folds=3,
                   stratified=False,
                   random_state=True,
                   data_fn=data.data,
                   model_fn=model.build_model,
                   optim_metric="loss",
                   optim_metric_mode="min",
                   save_dir="/tmp/")
    hyper_params = {
        "data": {},
        "shared": {"max_features": 100, "maxlen": 20},
        "model": {"filters": hp.choice("m_filters", (2, 5)),
                  "hidden_dims": 3,
                  },
        "fit": {"epochs": 1}
    }
    fn_test(fn, hyper_params, tmp_dir=str(tmpdir))
    trials = Trials()
    best = fmin(fn, hyper_params, trials=trials, algo=tpe.suggest, max_evals=2)
    assert isinstance(best, dict) 
Example #10
Source File: bot.py    From ebisu with MIT License
def params_search(self):
        """
 ˜      function to search params
        """
        def objective(args):
            logger.info(f"Params : {args}")
            try:
                self.params = args
                self.exchange = BitMexBackTest()
                self.exchange.on_update(self.bin_size, self.strategy)
                profit_factor = self.exchange.win_profit/self.exchange.lose_loss
                logger.info(f"Profit Factor : {profit_factor}")
                ret = {
                    'status': STATUS_OK,
                    'loss': 1/profit_factor
                }
            except Exception as e:
                ret = {
                    'status': STATUS_FAIL
                }

            return ret

        trials = Trials()
        best_params = fmin(objective, self.options(), algo=tpe.suggest, trials=trials, max_evals=200)
        logger.info(f"Best params is {best_params}")
        logger.info(f"Best profit factor is {1/trials.best_trial['result']['loss']}") 
Example #11
Source File: tuner.py    From MatchZoo with Apache License 2.0
def tune(self):
        """
        Start tuning.

        Notice that `tune` does not affect the tuner's inner state, so each
        new call to `tune` starts fresh. In other words, hyperspaces are
        suggestive only within the same `tune` call.
        """
        if self.__curr_run_num != 0:
            print(
                """WARNING: `tune` does not affect the tuner's inner state, so
                each new call to `tune` starts fresh. In other words,
                hyperspaces are suggestive only within the same `tune` call."""
            )
        self.__curr_run_num = 0
        logging.getLogger('hyperopt').setLevel(logging.CRITICAL)

        trials = hyperopt.Trials()

        self._fmin(trials)

        return {
            'best': trials.best_trial['result']['mz_result'],
            'trials': [trial['result']['mz_result'] for trial in trials.trials]
        } 
Example #12
Source File: task.py    From HRERE with MIT License
def run(self):
        trials = Trials()
        best = fmin(self._obj, self.model_param_space._build_space(),
                tpe.suggest, self.max_evals, trials)
        best_params = space_eval(self.model_param_space._build_space(), best)
        best_params = self.model_param_space._convert_into_param(best_params)
        trial_loss = np.asarray(trials.losses(), dtype=float)
        best_ind = np.argmin(trial_loss)
        best_ap = trial_loss[best_ind]
        best_loss = trials.trial_attachments(trials.trials[best_ind])["loss"]
        best_acc = trials.trial_attachments(trials.trials[best_ind])["acc"]
        self.logger.info("-" * 50)
        self.logger.info("Best Average Precision: %.3f" % best_ap)
        self.logger.info("with Loss %.3f, Accuracy %.3f" % (best_loss, best_acc))
        self.logger.info("Best Param:")
        self.task._print_param_dict(best_params)
        self.logger.info("-" * 50) 
Example #13
Source File: tuner.py    From MatchZoo-py with Apache License 2.0
def tune(self):
        """
        Start tuning.

        Notice that `tune` does not affect the tuner's inner state, so each
        new call to `tune` starts fresh. In other words, hyperspaces are
        suggestive only within the same `tune` call.
        """
        if self.__curr_run_num != 0:
            print(
                """WARNING: `tune` does not affect the tuner's inner state, so
                each new call to `tune` starts fresh. In other words,
                hyperspaces are suggestive only within the same `tune` call."""
            )
        self.__curr_run_num = 0
        logging.getLogger('hyperopt').setLevel(logging.CRITICAL)

        trials = hyperopt.Trials()

        self._fmin(trials)

        return {
            'best': trials.best_trial['result']['mz_result'],
            'trials': [trial['result']['mz_result'] for trial in trials.trials]
        } 
Example #14
Source File: autotune.py    From scVI with MIT License
def run(self):
        logger_all.debug("Instantiating MongoTrials object.")
        trials = MongoTrials(
            as_mongo_str(os.path.join(self.mongo_url, "jobs")), exp_key=self.exp_key
        )
        logger_all.debug("Calling fmin.")
        fmin(
            fn=self.objective_hyperopt,
            space=self.space,
            algo=self.algo,
            max_evals=self.max_evals,
            trials=trials,
            show_progressbar=self.show_progressbar,
        )
        # queue.put uses pickle so remove attribute containing thread.lock
        if hasattr(trials, "handle"):
            logger_all.debug("fmin returned. Deleting Trial handle for pickling.")
            del trials.handle
        logger_all.debug("Putting Trials in Queue.")
        self.queue.put(trials) 
Example #15
Source File: base_worker.py    From BOAH with Apache License 2.0
def run_tpe(self, num_iterations):
        """
            Wrapper around TPE to return a HpBandSter Result object to integrate better with the other methods
        """
        try:
            from hyperopt import fmin, tpe, hp, STATUS_OK, Trials
        except ImportError:
            raise ImportError('To run TPE, please install the hyperopt package!')
        except:
            raise

        def tpe_objective(config):
            loss = self.evaluate_and_log(config, budget=self.max_budget)
            return({'config': config,
                    'loss': loss,
                    'status': STATUS_OK})

        space = self.tpe_configspace()
        trials = Trials()
        best = fmin(tpe_objective,
                space=space,
                algo=tpe.suggest,
                max_evals=num_iterations,
                trials=trials)
        return(self.get_result()) 
Example #16
Source File: cash.py    From pyodds with MIT License
def model_selector(self, max_evals=50):
        trials = Trials()
        best_clf = fmin(self.f, CUMULATIVE_SEARCH_SPACE, algo=tpe.suggest,
                        max_evals=max_evals, trials=trials)
        config = space_eval(CUMULATIVE_SEARCH_SPACE, best_clf)
        print(config)
        return construct_classifier(config) 
Example #17
Source File: optimization_algorithm_base.py    From hypermax with BSD 3-Clause "New" or "Revised" License
def convertResultsToTrials(self, hyperparameterSpace, results):
        trials = hyperopt.Trials()

        for resultIndex, result in enumerate(results):
            data = {
                'book_time': datetime.datetime.now(),
                'exp_key': None,
                'misc': {'cmd': ('domain_attachment', 'FMinIter_Domain'),
                         'idxs': {},
                         'tid': resultIndex,
                         'vals': {},
                         'workdir': None},
                'owner': None,
                'refresh_time': datetime.datetime.now(),
                'result': {'loss': result['loss'], 'status': result['status']},
                'spec': None,
                'state': 2,
                'tid': resultIndex,
                'version': 0
            }

            for param in Hyperparameter(hyperparameterSpace).getFlatParameters():
                value = result[param.name]
                if value != "" and value is not None:
                    if 'enum' in param.config:
                        value = param.config['enum'].index(value)

                    data['misc']['idxs'][param.hyperoptVariableName] = [resultIndex]
                    data['misc']['vals'][param.hyperoptVariableName] = [value]
                else:
                    data['misc']['idxs'][param.hyperoptVariableName] = []
                    data['misc']['vals'][param.hyperoptVariableName] = []

            trials.insert_trial_doc(data)
        return trials 
Example #18
Source File: parameter_search.py    From Multitask4Veracity with MIT License
def parameter_search(ntrials, objective_function, fname):

    
    search_space= { 'num_dense_layers': hp.choice('nlayers', [1,2]),
                    'num_dense_units': hp.choice('num_dense', [300, 400,
                                                               500, 600]), 
                    'num_epochs': hp.choice('num_epochs', [50]),
                    'num_lstm_units': hp.choice('num_lstm_units', [100, 200,
                                                                    300]),
                    'num_lstm_layers': hp.choice('num_lstm_layers', [1,2]),
                    'learn_rate': hp.choice('learn_rate', [1e-4, 1e-3]), 
                    'batchsize': hp.choice('batchsize', [32]),
                    'l2reg': hp.choice('l2reg', [ 1e-3])
                 
    }
    
    trials = Trials()
    
    best = fmin(objective_function,
        space=search_space,
        algo=tpe.suggest,
        max_evals=ntrials,
        trials=trials)
    
    params = trials.best_trial['result']['Params']
    
    directory = "output"
    if not os.path.exists(directory):
        os.mkdir(directory)
    
    f = open('output/trials_'+fname+'.txt', "wb")
    pickle.dump(trials, f)
    f.close()
    
    filename = 'output/bestparams_'+fname+'.txt'
    f = open(filename, "wb")
    pickle.dump(params, f)
    f.close()
    
    return params 
Example #19
Source File: bert_with_hypertuning_main.py    From texar-pytorch with Apache License 2.0
def run(self, hyperparams: Dict):
        r"""Run the TPE algorithm with hyperparameters  :attr:`hyperparams`

        Args:
            hyperparams: Dict
                The `(key, value)` pairs of hyperparameters along their range of
                values.
        """
        space = {}
        for k, v in hyperparams.items():
            if isinstance(v, dict):
                if v["dtype"] == int:
                    space[k] = hpo.hp.choice(
                        k, range(v["start"], v["end"]))
                else:
                    space[k] = hpo.hp.uniform(k, v["start"], v["end"])
        trials = hpo.Trials()
        hpo.fmin(fn=self.objective_func,
                 space=space,
                 algo=hpo.tpe.suggest,
                 max_evals=3,
                 trials=trials)
        _, best_trial = min((trial["result"]["loss"], trial)
                            for trial in trials.trials)

        # delete all the other models
        for trial in trials.trials:
            if trial is not best_trial:
                shutil.rmtree(trial["result"]["model"]) 
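The loop above only reads the keys "dtype", "start" and "end" from each value in hyperparams, so the argument is presumably a nested dict along the following lines; the parameter names and ranges here are invented for illustration and are not taken from texar-pytorch:

hyperparams = {
    "hidden_dim":    {"dtype": int,   "start": 128,  "end": 512},   # becomes hp.choice over range(128, 512)
    "learning_rate": {"dtype": float, "start": 1e-5, "end": 1e-3},  # becomes hp.uniform(1e-5, 1e-3)
}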
Example #20
Source File: tuner.py    From pytorch-wrapper with MIT License
def run(self, trials_load_path=None, trials_save_path=None):
        """
        Initiates the tuning algorithm.

        :param trials_load_path: Path of a Trials object to load at the beginning of the tuning algorithm. If None the
            tuning algorithm will start from scratch.
        :param trials_save_path: Path where to save the Trials object after each iteration. If None the Trials object
            will not be saved.
        :return: A sorted list of tuples [ (loss, {parameters}), ... ].
        """

        self._current_iteration = 0
        self._points = []

        self._trials_save_path = trials_save_path

        if trials_load_path is None:
            self._current_trials_object = Trials()
        else:
            with open(trials_load_path, 'rb') as fr:
                self._current_trials_object = pickle.load(fr)

        _ = fmin(
            fn=self._step_wrapper_fn,
            space=self._hyper_parameter_generators,
            algo=self._algorithm,
            trials=self._current_trials_object,
            max_evals=self._fit_iterations
        )

        self._points.sort(key=lambda x: x[0])

        return self._points 
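The trials_load_path / trials_save_path arguments rely on the fact that a hyperopt Trials object can be pickled and handed back to fmin to resume a search. A rough, self-contained sketch of that resume pattern outside this class, with a toy objective and an arbitrary file name:

import pickle
from hyperopt import Trials, fmin, tpe, hp, STATUS_OK

def objective(params):                       # toy objective, illustrative only
    return {"loss": (params["x"] - 3.0) ** 2, "status": STATUS_OK}

space = {"x": hp.uniform("x", -10.0, 10.0)}

trials = Trials()
fmin(fn=objective, space=space, algo=tpe.suggest, max_evals=50, trials=trials)
with open("trials.pkl", "wb") as f:          # save progress after the first run
    pickle.dump(trials, f)

# later: reload and continue; fmin only runs the evaluations still missing up to max_evals
with open("trials.pkl", "rb") as f:
    trials = pickle.load(f)
fmin(fn=objective, space=space, algo=tpe.suggest, max_evals=100, trials=trials)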
Example #21
Source File: hyperopt.py    From BTB with MIT License
def _hyperopt_tuning_function(algo, scoring_function, tunable_hyperparameters, iterations):
    """Create a tuning function that uses ``HyperOpt``.

    With a given suggesting algorithm from the library ``HyperOpt``, create a tuning
    function that maximizes the score using ``fmin``.

    Args:
        algo (hyperopt.algo):
            Search / Suggest ``HyperOpt`` algorithm to be used with ``fmin`` function.
    """

    minimized_scoring = _make_minimize_function(scoring_function)
    search_space = _search_space_from_dict(tunable_hyperparameters)
    trials = Trials()
    fmin(
        minimized_scoring,
        search_space,
        algo=algo,
        max_evals=iterations,
        trials=trials,
        verbose=False
    )

    # normalize best score to match other tuners
    best_score = -1 * trials.best_trial['result']['loss']

    return best_score 
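BTB's helper _make_minimize_function is not shown in this excerpt; since fmin minimizes while the tuner's scoring function is meant to be maximized, it presumably just negates the score. A guess at the idea, not BTB's actual implementation:

def _make_minimize_function(scoring_function):
    # fmin minimizes, so flip the sign of a higher-is-better score
    def minimized(params):
        return -scoring_function(**params)
    return minimized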
Example #22
Source File: test_e2e.py    From hyperas with MIT License
def test_ensemble():
    X_train, X_test, Y_train, Y_test = data()
    optim.best_ensemble(nb_ensemble_models=2,
                        model=model,
                        data=data,
                        algo=rand.suggest,
                        max_evals=1,
                        trials=Trials(),
                        voting='hard') 
Example #23
Source File: test_e2e.py    From hyperas with MIT License
def test_simple():
    X_train, Y_train, X_test, Y_test = data()
    trials = Trials()
    best_run, best_model = optim.minimize(model=model,
                                          data=data,
                                          algo=tpe.suggest,
                                          max_evals=1,
                                          trials=trials,
                                          verbose=False) 
Example #24
Source File: test_lr_plateau.py    From hyperas with MIT License
def test_advanced_callbacks():
    X_train, Y_train, X_test, Y_test = data()
    best_run, best_model = optim.minimize(model=create_model,
                                          data=data,
                                          algo=tpe.suggest,
                                          max_evals=1,
                                          trials=Trials(),
                                          verbose=False) 
Example #25
Source File: test_functional_api.py    From hyperas with MIT License
def test_functional_api():
    X_train, Y_train, X_test, Y_test = data()
    best_run, best_model = optim.minimize(model=model,
                                          data=data,
                                          algo=tpe.suggest,
                                          max_evals=1,
                                          trials=Trials(),
                                          verbose=False)
    best_run, best_model = optim.minimize(model=model_multi_line_arguments,
                                          data=data,
                                          algo=tpe.suggest,
                                          max_evals=1,
                                          trials=Trials(),
                                          verbose=False) 
Example #26
Source File: task.py    From NFETC with MIT License
def run(self):
        trials = Trials()
        best = fmin(self._obj, self.model_param_space._build_space(), tpe.suggest, self.max_evals, trials)
        best_params = space_eval(self.model_param_space._build_space(), best)
        best_params = self.model_param_space._convert_into_param(best_params)
        trial_loss = np.asarray(trials.losses(), dtype=float)
        best_ind = np.argmin(trial_loss)
        best_loss = -trial_loss[best_ind]
        best_pacc = trials.trial_attachments(trials.trials[best_ind])["pacc"]
        # best_eacc = trials.trial_attachments(trials.trials[best_ind])["eacc"]
        self.logger.info("-" * 50)
        self.logger.info("Best Exact Accuracy %.3f with Parital Accuracy %.3f" % (best_loss, best_pacc))
        self.logger.info("Best Param:")
        self.task._print_param_dict(best_params)
        self.logger.info("-" * 50) 
Example #27
Source File: optimize.py    From starsem2018-entity-linking with Apache License 2.0
def optimize(training_config, model_config, train_data, dev_data, eval_dataset, logger):
    trials = hy.Trials()
    atexit.register(lambda: wrap_up_optimization(trials, training_config['optimize.save.history'], logger))

    logger.debug("Loading embeddings")
    embedding_matrix, element2idx = utils.load_word_embeddings(model_config['word.embeddings'])
    entities_embedding_matrix, entity2idx, rels_embedding_matrix, rel2idx = utils.load_kb_embeddings(model_config['kb.embeddings'])

    def optimization_trial(sampled_parameters):
        global trials_counter, dev, train
        try:
            logger.info("** Trial: {}/{} ** ".format(trials_counter, training_config['optimize.num.trails']))
            trials_counter += 1
            sampled_parameters['negative.weight.epoch'] = int(sampled_parameters['negative.weight.epoch'])
            model_trial = getattr(models, training_config.get('model.type', "VectorModel"))(parameters={**model_config, **sampled_parameters}, logger=logger)
            model_trial.prepare_model(embedding_matrix=embedding_matrix, element2idx=element2idx,
                                      entities_embedding_matrix=entities_embedding_matrix, entity2idx=entity2idx,
                                      rels_embedding_matrix=rels_embedding_matrix, rel2idx=rel2idx)
            if train is None and dev is None:
                dev = (model_trial.encode_batch(dev_data[:-1]), dev_data[-1])
                train = (model_trial.encode_batch(train_data[:-1]), train_data[-1])

            results = model_trial.train(train, dev=dev,
                                        eval_on_dataset=lambda: eval_dataset.eval(MLLinker(model=model_trial, logger=logger), verbose=False))
            results['actual_loss'] = results['v_loss']
            results['loss'] = 1.0 - results['v_f1']
            return {**results, 'status': hy.STATUS_OK, 'sampled.parameters': sampled_parameters}
        except Exception as ex:
            logger.error(ex)
            return {'loss': -1, 'status': hy.STATUS_FAIL, 'sampled.parameters': sampled_parameters}

    hy.fmin(optimization_trial,
            optimization_space,
            algo=hy.rand.suggest,
            max_evals=training_config['optimize.num.trails'],
            trials=trials, verbose=1) 
Example #28
Source File: hyperopt_tuner.py    From nni with MIT License
def update_search_space(self, search_space):
        """
        Update search space definition in tuner by search_space in parameters.

        Will be called when the experiment is first set up or when the search space is updated in the WebUI.

        Parameters
        ----------
        search_space : dict
        """
        self.json = search_space

        search_space_instance = json2space(self.json)
        rstate = np.random.RandomState()
        trials = hp.Trials()
        domain = hp.Domain(None,
                           search_space_instance,
                           pass_expr_memo_ctrl=None)
        algorithm = self._choose_tuner(self.algorithm_name)
        self.rval = hp.FMinIter(algorithm,
                                domain,
                                trials,
                                max_evals=-1,
                                rstate=rstate,
                                verbose=0)
        self.rval.catch_eval_exceptions = False 
Example #29
Source File: gaussian_process.py    From PES-Learn with BSD 3-Clause "New" or "Revised" License
def optimize_model(self):
        print("Beginning hyperparameter optimization...")
        print("Trying {} combinations of hyperparameters".format(self.hp_maxit))
        print("Training with {} points (Full dataset contains {} points).".format(self.ntrain, self.n_datapoints))
        print("Using {} training set point sampling.".format(self.sampler))
        print("Errors are root-mean-square error in wavenumbers (cm-1)")
        self.hyperopt_trials = Trials()
        self.itercount = 1  # keep track of hyperopt iterations 
        if self.input_obj.keywords['rseed']:
            rstate = np.random.RandomState(self.input_obj.keywords['rseed'])
        else:
            rstate = None
        best = fmin(self.hyperopt_model,
                    space=self.hyperparameter_space,
                    algo=tpe.suggest,
                    max_evals=self.hp_maxit*2,
                    rstate=rstate, 
                    show_progressbar=False,
                    trials=self.hyperopt_trials)
        hyperopt_complete()
        print("Best performing hyperparameters are:")
        final = space_eval(self.hyperparameter_space, best)
        print(str(sorted(final.items())))
        self.optimal_hyperparameters  = dict(final)
        # obtain final model from best hyperparameters
        print("Fine-tuning final model architecture...")
        self.build_model(self.optimal_hyperparameters, nrestarts=10, maxit=1000)
        print("Final model performance (cm-1):")
        self.test_error = self.vet_model(self.model)
        self.save_model(self.optimal_hyperparameters) 
Example #30
Source File: hyperparam.py    From elephas with MIT License
def _minimize(self, dummy_iterator):
        trials = Trials()
        algo = rand.suggest

        elem = next(dummy_iterator)
        import random
        random.seed(elem)
        rand_seed = np.random.randint(elem)

        base_minimizer(model=None, data=None, functions=None, algo=algo, max_evals=self.max_evals,
                       trials=trials, rseed=rand_seed, full_model_string=self.model_string, notebook_name=None,
                       verbose=True, stack=3)
        yield trials