Python skopt.gp_minimize() Examples

The following are 21 code examples of skopt.gp_minimize(), drawn from the open-source projects named above each example. You may also want to check out all available functions and classes of the skopt module, or try the search function.
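For orientation, here is a minimal, self-contained sketch of the basic gp_minimize call (the toy objective and bounds are chosen only for illustration):

from skopt import gp_minimize

# Minimize a simple 1-D function over [-2, 2].
res = gp_minimize(lambda x: (x[0] - 0.3) ** 2,  # objective takes a list of values
                  [(-2.0, 2.0)],                # one real-valued dimension
                  n_calls=15,                   # total evaluation budget
                  random_state=0)               # reproducible run
print(res.x, res.fun)                           # best point and best value found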
Example #1
Source File: sampling_comparison.py    From scikit-optimize with BSD 3-Clause "New" or "Revised" License
def run_measure(initial_point_generator, n_initial_points=10):
    start = time.time()
    # n_repeats must be set to a much higher value to obtain meaningful results.
    n_repeats = 1
    res = run(gp_minimize, initial_point_generator,
              n_initial_points=n_initial_points, n_repeats=n_repeats)
    duration = time.time() - start
    # print("%s %s: %.2f s" % (initial_point_generator,
    #                          str(init_point_gen_kwargs),
    #                          duration))
    return res
#############################################################################
# Objective
# =========
#
# The objective of this example is to find one of these minima in as
# few iterations as possible. One iteration is defined as one call
# to the :class:`benchmarks.hart6` function.
#
# We will evaluate each model several times using a different seed for the
# random number generator. Then compare the average performance of these
# models. This makes the comparison more robust against models that get
# "lucky". 
Example #2
Source File: test_plots.py    From scikit-optimize with BSD 3-Clause "New" or "Revised" License
def test_evaluate_min_params():
    res = gp_minimize(bench3,
                      [(-2.0, 2.0)],
                      x0=[0.],
                      noise=1e-8,
                      n_calls=8,
                      n_random_starts=3,
                      random_state=1)

    x_min, f_min = expected_minimum(res, random_state=1)
    x_min2, f_min2 = expected_minimum_random_sampling(res,
                                                      n_random_starts=1000,
                                                      random_state=1)
    plots.plot_gaussian_process(res)
    assert _evaluate_min_params(res, params='result') == res.x
    assert _evaluate_min_params(res, params=[1.]) == [1.]
    assert _evaluate_min_params(res, params='expected_minimum',
                                random_state=1) == x_min
    assert _evaluate_min_params(res, params='expected_minimum',
                                n_minimum_search=20,
                                random_state=1) == x_min
    assert _evaluate_min_params(res, params='expected_minimum_random',
                                n_minimum_search=1000,
                                random_state=1) == x_min2 
Example #3
Source File: test_gp_opt.py    From scikit-optimize with BSD 3-Clause "New" or "Revised" License
def test_mixed_categoricals(initgen):

    space = Space([
        Categorical(name="x", categories=["1", "2", "3"]),
        Categorical(name="y", categories=[4, 5, 6]),
        Real(name="z", low=1.0, high=5.0)
    ])

    def objective(param_list):
        x = param_list[0]
        y = param_list[1]
        z = param_list[2]
        loss = int(x) + y * z
        return loss

    res = gp_minimize(objective, space, n_calls=12, random_state=1,
                      initial_point_generator=initgen)
    assert res["x"] in [['1', 4, 1.0], ['2', 4, 1.0]] 
Example #4
Source File: test_gp_opt.py    From scikit-optimize with BSD 3-Clause "New" or "Revised" License
def test_use_given_estimator():
    """ Test that gp_minimize does not use default estimator if one is passed
    in explicitly. """
    domain = [(1.0, 2.0), (3.0, 4.0)]
    noise_correct = 1e+5
    noise_fake = 1e-10
    estimator = cook_estimator("GP", domain, noise=noise_correct)
    res = gp_minimize(branin, domain, n_calls=1, n_initial_points=1,
                      base_estimator=estimator, noise=noise_fake)

    assert res['models'][-1].noise == noise_correct 
Example #5
Source File: skopt_gp_main.py    From guildai with Apache License 2.0
def _suggest_x(dims, x0, y0, random_start, random_state, opts):
    res = skopt.gp_minimize(
        lambda *args: 0,
        dims,
        n_calls=1,
        n_random_starts=1 if random_start else 0,
        x0=x0,
        y0=y0,
        random_state=random_state,
        acq_func=opts["acq-func"],
        kappa=opts["kappa"],
        xi=opts["xi"],
        noise=opts["noise"],
    )
    return res.x_iters[-1], res.random_state 
Example #6
Source File: SearchBayesianSkopt.py    From RecSys2019_DeepLearning_Evaluation with GNU Affero General Public License v3.0
def _set_skopt_params(self, n_calls = 70,
                          n_random_starts = 20,
                          n_points = 10000,
                          n_jobs = 1,
                          # noise = 'gaussian',
                          noise = 1e-5,
                          acq_func = 'gp_hedge',
                          acq_optimizer = 'auto',
                          random_state = None,
                          verbose = True,
                          n_restarts_optimizer = 10,
                          xi = 0.01,
                          kappa = 1.96,
                          x0 = None,
                          y0 = None):
        """
        wrapper to change the params of the bayesian optimizator.
        for further details:
        https://scikit-optimize.github.io/#skopt.gp_minimize

        """
        self.n_point = n_points
        self.n_calls = n_calls
        self.n_random_starts = n_random_starts
        self.n_jobs = n_jobs
        self.acq_func = acq_func
        self.acq_optimizer = acq_optimizer
        self.random_state = random_state
        self.n_restarts_optimizer = n_restarts_optimizer
        self.verbose = verbose
        self.xi = xi
        self.kappa = kappa
        self.noise = noise
        self.x0 = x0
        self.y0 = y0 
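Presumably the class forwards these attributes to gp_minimize when the search runs; a hedged sketch of such a call inside a hypothetical search method (self._objective and self.hyperparams_range are placeholder names, not part of the source above):

    def _run_search(self):
        # hypothetical method: forward the stored params to gp_minimize
        res = gp_minimize(self._objective,          # placeholder objective
                          self.hyperparams_range,   # placeholder search space
                          n_calls=self.n_calls,
                          n_random_starts=self.n_random_starts,
                          n_points=self.n_point,
                          n_jobs=self.n_jobs,
                          acq_func=self.acq_func,
                          acq_optimizer=self.acq_optimizer,
                          random_state=self.random_state,
                          verbose=self.verbose,
                          n_restarts_optimizer=self.n_restarts_optimizer,
                          xi=self.xi, kappa=self.kappa, noise=self.noise,
                          x0=self.x0, y0=self.y0)
        return res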
Example #7
Source File: test_plots.py    From scikit-optimize with BSD 3-Clause "New" or "Revised" License
def test_plots_work_without_cat():
    """Basic smoke tests to make sure plotting doesn't crash."""
    SPACE = [
        Integer(1, 20, name='max_depth'),
        Integer(2, 100, name='min_samples_split'),
        Integer(5, 30, name='min_samples_leaf'),
        Integer(1, 30, name='max_features'),
    ]

    def objective(params):
        clf = DecisionTreeClassifier(random_state=3,
                                     **{dim.name: val
                                        for dim, val in zip(SPACE, params)
                                        if dim.name != 'dummy'})
        return -np.mean(cross_val_score(clf, *load_breast_cancer(return_X_y=True)))

    res = gp_minimize(objective, SPACE, n_calls=10, random_state=3)
    plots.plot_convergence(res)
    plots.plot_evaluations(res)
    plots.plot_objective(res)
    plots.plot_objective(res,
                         minimum='expected_minimum')
    plots.plot_objective(res,
                         sample_source='expected_minimum',
                         n_minimum_search=10)
    plots.plot_objective(res, sample_source='result')
    plots.plot_regret(res)

    # TODO: Compare plots to known good results?
    # Look into how matplotlib does this. 
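The skopt plot functions return matplotlib Axes objects, so outside of a smoke test the figures can be shown or saved in the usual way; a brief sketch, assuming res comes from a gp_minimize run as above:

import matplotlib.pyplot as plt
from skopt.plots import plot_convergence

ax = plot_convergence(res)            # returns a matplotlib Axes
ax.figure.savefig("convergence.png")  # or plt.show() for interactive use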
Example #8
Source File: test_utils.py    From scikit-optimize with BSD 3-Clause "New" or "Revised" License
def test_expected_minimum_random_sampling():
    res = gp_minimize(bench3,
                      [(-2.0, 2.0)],
                      x0=[0.],
                      noise=1e-8,
                      n_calls=8,
                      n_random_starts=3,
                      random_state=1)

    x_min, f_min = expected_minimum_random_sampling(res, random_state=1)
    x_min2, f_min2 = expected_minimum_random_sampling(res, random_state=1)

    assert f_min <= res.fun  # true since noise ~= 0.0
    assert x_min == x_min2
    assert f_min == f_min2 
Example #9
Source File: test_utils.py    From scikit-optimize with BSD 3-Clause "New" or "Revised" License
def test_expected_minimum():
    res = gp_minimize(bench3,
                      [(-2.0, 2.0)],
                      x0=[0.],
                      noise=1e-8,
                      n_calls=8,
                      n_random_starts=3,
                      random_state=1)

    x_min, f_min = expected_minimum(res, random_state=1)
    x_min2, f_min2 = expected_minimum(res, random_state=1)

    assert f_min <= res.fun  # true since noise ~= 0.0
    assert x_min == x_min2
    assert f_min == f_min2 
Example #10
Source File: test_utils.py    From scikit-optimize with BSD 3-Clause "New" or "Revised" License
def test_dump_and_load():
    res = gp_minimize(bench3,
                      [(-2.0, 2.0)],
                      x0=[0.],
                      acq_func="LCB",
                      n_calls=2,
                      n_random_starts=0,
                      random_state=1)

    # Test normal dumping and loading
    with tempfile.TemporaryFile() as f:
        dump(res, f)
        f.seek(0)
        res_loaded = load(f)
    check_optimization_results_equality(res, res_loaded)
    assert "func" in res_loaded.specs["args"]

    # Test dumping without objective function
    with tempfile.TemporaryFile() as f:
        dump(res, f, store_objective=False)
        f.seek(0)
        res_loaded = load(f)
    check_optimization_results_equality(res, res_loaded)
    assert not ("func" in res_loaded.specs["args"])

    # Delete the objective function and dump the modified object
    del res.specs["args"]["func"]
    with tempfile.TemporaryFile() as f:
        dump(res, f, store_objective=False)
        f.seek(0)
        res_loaded = load(f)
    check_optimization_results_equality(res, res_loaded)
    assert not ("func" in res_loaded.specs["args"]) 
Example #11
Source File: test_gp_opt.py    From scikit-optimize with BSD 3-Clause "New" or "Revised" License
def test_mixed_categoricals2(initgen):
    space = Space([
        Categorical(name="x", categories=["1", "2", "3"]),
        Categorical(name="y", categories=[4, 5, 6])
    ])

    def objective(param_list):
        x = param_list[0]
        y = param_list[1]
        loss = int(x) + y
        return loss

    res = gp_minimize(objective, space, n_calls=12, random_state=1,
                      initial_point_generator=initgen)
    assert res["x"] == ['1', 4] 
Example #12
Source File: test_gp_opt.py    From scikit-optimize with BSD 3-Clause "New" or "Revised" License
def test_use_given_estimator_with_max_model_size():
    """ Test that gp_minimize does not use default estimator if one is passed
    in explicitly. """
    domain = [(1.0, 2.0), (3.0, 4.0)]
    noise_correct = 1e+5
    noise_fake = 1e-10
    estimator = cook_estimator("GP", domain, noise=noise_correct)
    res = gp_minimize(branin, domain, n_calls=1, n_initial_points=1,
                      base_estimator=estimator, noise=noise_fake,
                      model_queue_size=1)
    assert len(res['models']) == 1
    assert res['models'][-1].noise == noise_correct 
Example #13
Source File: bayesian_optimization.py    From harmonicConvolutions with MIT License
def optimize(n_trials):
    """Run the gp_minimize function."""
    dimensions = [(4, 10),       # batch size
                  (1e-3, 1e-1),  # learning rate
                  (0.7, 1.5),    # std mult
                  (3, 6),        # filter_size
                  (2, 4),        # n_rings
                  (0.5, 1.5)]    # phase_preconditioner

    x0 = [5, 1e-2, 1., 5, 2, 1.]

    print(gp_minimize(wrapper_function, dimensions, x0=x0, n_calls=n_trials,
                      verbose=True, callback=dump))
Example #14
Source File: test_gp_opt.py    From scikit-optimize with BSD 3-Clause "New" or "Revised" License
def test_gpr_default():
    """Smoke test that gp_minimize does not fail for default values."""
    gp_minimize(branin, ((-5.0, 10.0), (0.0, 15.0)), n_initial_points=1,
                n_calls=2) 
Example #15
Source File: test_gp_opt.py    From scikit-optimize with BSD 3-Clause "New" or "Revised" License
def test_n_jobs():
    r_single = gp_minimize(bench3, [(-2.0, 2.0)], acq_optimizer="lbfgs",
                           acq_func="EI", n_calls=2, n_initial_points=1,
                           random_state=1, noise=1e-10)
    r_double = gp_minimize(bench3, [(-2.0, 2.0)], acq_optimizer="lbfgs",
                           acq_func="EI", n_calls=2, n_initial_points=1,
                           random_state=1, noise=1e-10, n_jobs=2)
    assert_array_equal(r_single.x_iters, r_double.x_iters) 
Example #16
Source File: test_gp_opt.py    From scikit-optimize with BSD 3-Clause "New" or "Revised" License
def check_minimize(func, y_opt, bounds, acq_optimizer, acq_func,
                   margin, n_calls, n_initial_points=10, init_gen="random"):
    r = gp_minimize(func, bounds, acq_optimizer=acq_optimizer,
                    acq_func=acq_func, n_initial_points=n_initial_points,
                    n_calls=n_calls, random_state=1,
                    initial_point_generator=init_gen,
                    noise=1e-10)
    assert r.fun < y_opt + margin 
Example #17
Source File: utils.py    From fake-voice-detection with Apache License 2.0
def optimize_threshold(self, xtrain, ytrain, xval, yval):
        ytrain_pred = self.predict_labels(xtrain, raw_prob=True)
        yval_pred = self.predict_labels(xval, raw_prob=True)
        self.opt_threshold = 0.5
        ytrain_pred_labels = self.get_labels_from_prob(ytrain_pred, threshold=self.opt_threshold)
        yval_pred_labels = self.get_labels_from_prob(yval_pred, threshold=self.opt_threshold)
        train_f1_score = f1_score(ytrain_pred_labels, ytrain)
        val_f1_score = f1_score(yval_pred_labels, yval)
        print(f"train f1 score: {train_f1_score}, val f1 score: {val_f1_score}")

        f1_train_partial = partial(self.get_f1score_for_optimization, y_true=ytrain.copy(), y_pred=ytrain_pred.copy(), ismin=True)
        n_searches = 50
        dim_0 = Real(low=0.2, high=0.8, name='dim_0')
        dimensions = [dim_0]
        search_result = gp_minimize(func=f1_train_partial,
                                    dimensions=dimensions,
                                    acq_func='gp_hedge',  # probabilistically choose among EI, LCB and PI each iteration
                                    n_calls=n_searches,
                                    # n_jobs=n_cpu,
                                    verbose=False)

        self.opt_threshold = search_result.x
        if isinstance(self.opt_threshold, list):
            self.opt_threshold = self.opt_threshold[0]
        self.optimum_threshold_filename = f"model_threshold_{'_'.join(str(v) for k, v in model_params.items())}.npy"
        np.save(os.path.join(f"{model_params['model_save_dir']}", self.optimum_threshold_filename), self.opt_threshold)
        train_f1_score = self.get_f1score_for_optimization(self.opt_threshold, y_true=ytrain, y_pred=ytrain_pred)
        val_f1_score = self.get_f1score_for_optimization(self.opt_threshold, y_true=yval, y_pred=yval_pred )
        print(f"optimized train f1 score: {train_f1_score}, optimized val f1 score: {val_f1_score}") 
Example #18
Source File: bench_branin.py    From scikit-optimize with BSD 3-Clause "New" or "Revised" License
def run(n_calls=200, n_runs=10, acq_optimizer="lbfgs"):
    bounds = [(-5.0, 10.0), (0.0, 15.0)]
    optimizers = [("gp_minimize", gp_minimize),
                  ("forest_minimize", forest_minimize),
                  ("gbrt_minimize", gbrt_minimize),
                  ("dummy_minimize", dummy_minimize)]

    for name, optimizer in optimizers:
        print(name)
        results = []
        min_func_calls = []
        time_ = 0.0

        for random_state in range(n_runs):
            if name == "gp_minimize":
                res = optimizer(
                    branin, bounds, random_state=random_state, n_calls=n_calls,
                    noise=1e-10, verbose=True, acq_optimizer=acq_optimizer,
                    n_jobs=-1)
            elif name == "dummy_minimize":
                res = optimizer(
                    branin, bounds, random_state=random_state, n_calls=n_calls)
            else:
                res = optimizer(
                    branin, bounds, random_state=random_state, n_calls=n_calls,
                    acq_optimizer=acq_optimizer)
            results.append(res)
            func_vals = np.round(res.func_vals, 3)
            min_func_calls.append(np.argmin(func_vals) + 1)

        optimal_values = [result.fun for result in results]
        mean_optimum = np.mean(optimal_values)
        std = np.std(optimal_values)
        best = np.min(optimal_values)
        print("Mean optimum: " + str(mean_optimum))
        print("Std of optimal values" + str(std))
        print("Best optima:" + str(best))

        mean_fcalls = np.mean(min_func_calls)
        std_fcalls = np.std(min_func_calls)
        best_fcalls = np.min(min_func_calls)
        print("Mean func_calls to reach min: " + str(mean_fcalls))
        print("Std func_calls to reach min: " + str(std_fcalls))
        print("Fastest no of func_calls to reach min: " + str(best_fcalls)) 
Example #19
Source File: bench_ml.py    From scikit-optimize with BSD 3-Clause "New" or "Revised" License
def run(n_calls=32, n_runs=1, save_traces=True, n_jobs=1):
    """
    Main function used to run the experiments.

    Parameters
    ----------
    * `n_calls`: int
        Evaluation budget.

    * `n_runs`: int
        Number of times to repeat the optimization in order to average out noise.

    * `save_traces`: bool
        Whether or not to save the data collected during optimization.

    * `n_jobs`: int
        Number of different repeats of optimization to run in parallel.
    """
    surrogate_minimizers = [gbrt_minimize, forest_minimize, gp_minimize]
    selected_models = sorted(MODELS, key=lambda x: x.__name__)
    selected_datasets = (DATASETS.keys())

    # all parameter values and objectives collected during execution are stored in the dict below
    all_data = {}
    for model in selected_models:
        all_data[model] = {}

        for dataset in selected_datasets:
            if not issubclass(model, DATASETS[dataset]):
                continue

            all_data[model][dataset] = {}
            for surrogate_minimizer in surrogate_minimizers:
                print(surrogate_minimizer.__name__, model.__name__, dataset)
                seeds = np.random.randint(0, 2**30, n_runs)
                raw_trace = Parallel(n_jobs=n_jobs)(
                    delayed(evaluate_optimizer)(
                        surrogate_minimizer, model, dataset, n_calls, seed
                    ) for seed in seeds
                )
                all_data[model][dataset][surrogate_minimizer.__name__] = raw_trace

    # convert the model keys to strings so that results can be saved as json
    all_data = {k.__name__: v for k, v in all_data.items()}

    # dump the recorded objective values as json
    if save_traces:
        with open(datetime.now().strftime("%m_%Y_%d_%H_%M_%S") + '.json', 'w') as f:
            json.dump(all_data, f)
    calculate_performance(all_data) 
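A call such as the following would exercise this benchmark end to end (sketch; assumes MODELS and DATASETS are populated elsewhere in the file):

if __name__ == "__main__":
    run(n_calls=32, n_runs=1, save_traces=False, n_jobs=1)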
Example #20
Source File: bench_ml.py    From scikit-optimize with BSD 3-Clause "New" or "Revised" License
def evaluate_optimizer(surrogate_minimize, model, dataset, n_calls, random_state):
    """
    Evaluates some estimator for the task of optimizing the parameters of some
    model, given a limited number of model evaluations.

    Parameters
    ----------
    * `surrogate_minimize`:
        Minimization function from skopt (e.g. gp_minimize) that is used
        to minimize the objective.
    * `model`: scikit-learn estimator.
        sklearn estimator used for parameter tuning.
    * `dataset`: str
        Name of dataset to train ML model on.
    * `n_calls`: int
        Budget of evaluations
    * `random_state`: seed
        Set the random number generator in numpy.

    Returns
    -------
    * `trace`: list of tuples
        (p, f(p), best), where p is a dictionary of the form "param name": value,
        f(p) is the performance achieved by the model for configuration p,
        and best is the best value seen up to that index.
        Such a list records the history of the optimization run.
    """
    # the seed below is necessary so that processes which fork at the same
    # time generate different random numbers
    np.random.seed(random_state)
    problem = MLBench(model, dataset, random_state)
    space = problem.space
    dimensions_names = sorted(space)
    dimensions = [space[d][0] for d in dimensions_names]

    def objective(x):
        # convert list of dimension values to dictionary
        x = dict(zip(dimensions_names, x))
        # "evaluate" returns accuracy / r^2 (higher is better), so negate it for minimization
        y = -problem.evaluate(x)
        return y

    # optimization loop
    result = surrogate_minimize(objective, dimensions, n_calls=n_calls, random_state=random_state)
    trace = []
    min_y = np.inf
    for x, y in zip(result.x_iters, result.func_vals):
        min_y = min(y, min_y)
        x_dct = dict(zip(dimensions_names, x))
        trace.append((x_dct, y, min_y))

    print(random_state)
    return trace 
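A brief sketch of consuming the returned trace, e.g. to print the running best value (the model and dataset names are placeholders):

trace = evaluate_optimizer(gp_minimize, SomeModel, "some_dataset",
                           n_calls=16, random_state=0)
for params, value, best_so_far in trace:
    print(best_so_far)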
Example #21
Source File: bench_hart6.py    From scikit-optimize with BSD 3-Clause "New" or "Revised" License
def run(n_calls=200, n_runs=10, acq_optimizer="lbfgs"):
    bounds = np.tile((0., 1.), (6, 1))
    optimizers = [("gp_minimize", gp_minimize),
                  ("forest_minimize", forest_minimize),
                  ("gbrt_minimize", gbrt_minimize),
                  ("dummy_minimize", dummy_minimize)]

    for name, optimizer in optimizers:
        print(name)
        results = []
        min_func_calls = []

        for random_state in range(n_runs):
            print(random_state)
            if name == "gp_minimize":
                res = optimizer(
                    hart6, bounds, random_state=random_state, n_calls=n_calls,
                    noise=1e-10, n_jobs=-1, acq_optimizer=acq_optimizer,
                    verbose=1)
            elif name == "dummy_minimize":
                res = optimizer(
                    hart6, bounds, random_state=random_state, n_calls=n_calls)
            else:
                res = optimizer(
                    hart6, bounds, random_state=random_state, n_calls=n_calls)
            results.append(res)
            func_vals = np.round(res.func_vals, 3)
            min_func_calls.append(np.argmin(func_vals) + 1)

        optimal_values = [result.fun for result in results]
        mean_optimum = np.mean(optimal_values)
        std = np.std(optimal_values)
        best = np.min(optimal_values)
        print("Mean optimum: " + str(mean_optimum))
        print("Std of optimal values" + str(std))
        print("Best optima:" + str(best))

        mean_fcalls = np.mean(min_func_calls)
        std_fcalls = np.std(min_func_calls)
        best_fcalls = np.min(min_func_calls)
        print("Mean func_calls to reach min: " + str(mean_fcalls))
        print("Std func_calls to reach min: " + str(std_fcalls))
        print("Fastest no of func_calls to reach min: " + str(best_fcalls))