Python scipy.optimize.basinhopping() Examples

The following are 30 code examples of scipy.optimize.basinhopping(), drawn from open-source projects. The header above each example gives its source file, project, and license. You may also want to check out all available functions/classes of the module scipy.optimize.
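Before the project-specific examples, here is a minimal, self-contained sketch of a typical call; the objective function and parameter values are illustrative and follow the pattern in the SciPy documentation, not any particular project:

import numpy as np
from scipy.optimize import basinhopping

# A 1-D objective with several local minima.
def func(x):
    return np.cos(14.5 * x - 0.3) + (x + 0.2) * x

# basinhopping alternates random perturbations of x with a local
# minimization (BFGS here) and keeps the lowest minimum found.
res = basinhopping(func, x0=[1.0],
                   minimizer_kwargs={"method": "BFGS"},
                   niter=200)
print(res.x, res.fun)  # global minimum near x = -0.195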
Example #1
Source File: v2_validation.py    From Attentive-Filtering-Network with MIT License
def best_eer(val_scores, utt2len, utt2label, key_list):
    
    def f_neg(threshold):
        # SciPy minimizes the objective, so return the EER directly
        return utt_eer(val_scores, utt2len, utt2label, key_list, threshold)
    
    # Initialization of best threshold search
    thr_0 = [0.20] * 1 # binary class
    constraints = [(0.,1.)] * 1 # binary class
    def bounds(**kwargs):
        x = kwargs["x_new"]
        tmax = bool(np.all(x <= 1))
        tmin = bool(np.all(x >= 0))
        return tmax and tmin

    # Search using L-BFGS-B; the finite-difference step (eps) must be
    # large, otherwise the estimated gradient is zero
    minimizer_kwargs = {"method": "L-BFGS-B",
                        "bounds": constraints,
                        "options": {"eps": 0.05}}

    # Combine L-BFGS-B with basinhopping for a stochastic search with random steps
    logger.info("===> Searching optimal threshold for each label")
    start_time = timer()

    opt_output = basinhopping(f_neg, thr_0,
                              stepsize=0.1,
                              minimizer_kwargs=minimizer_kwargs,
                              niter=10,
                              accept_test=bounds)

    end_time = timer()
    logger.info("===> Optimal threshold for each label:\n{}".format(opt_output.x))
    logger.info("Threshold found in: %s seconds" % (end_time - start_time))

    score = opt_output.fun
    return score, opt_output.x 
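The `bounds` closure above implements basinhopping's `accept_test` protocol: it receives the proposed coordinates as the keyword argument `x_new` and returns a bool. The SciPy documentation shows the same check written as a class; below is a sketch of that variant, with the hypothetical name `ThresholdBounds` and the same [0, 1] limits as the threshold search above:

import numpy as np

class ThresholdBounds:
    """Accept test that keeps basinhopping proposals inside [xmin, xmax]."""
    def __init__(self, xmin=0.0, xmax=1.0):
        self.xmin = xmin
        self.xmax = xmax

    def __call__(self, **kwargs):
        x = kwargs["x_new"]
        return bool(np.all(x >= self.xmin) and np.all(x <= self.xmax))

# usage: basinhopping(f_neg, thr_0, accept_test=ThresholdBounds(), ...)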
Example #2
Source File: test__basinhopping.py    From GraphicDesignPatternByPython with MIT License
def test_all_nograd_minimizers(self):
        # test 2d minimizations without gradient.  Newton-CG requires
        # jac=True, so it is not included here.
        i = 1
        methods = ['CG', 'BFGS', 'L-BFGS-B', 'TNC', 'SLSQP',
                   'Nelder-Mead', 'Powell', 'COBYLA']
        minimizer_kwargs = copy.copy(self.kwargs_nograd)
        for method in methods:
            minimizer_kwargs["method"] = method
            res = basinhopping(func2d_nograd, self.x0[i],
                               minimizer_kwargs=minimizer_kwargs,
                               niter=self.niter, disp=self.disp)
            tol = self.tol
            if method == 'COBYLA':
                tol = 2
            assert_almost_equal(res.x, self.sol[i], decimal=tol) 
Example #3
Source File: test__basinhopping.py    From GraphicDesignPatternByPython with MIT License
def test_seed_reproducibility(self):
        # seed should ensure reproducibility between runs
        minimizer_kwargs = {"method": "L-BFGS-B", "jac": True}

        f_1 = []

        def callback(x, f, accepted):
            f_1.append(f)

        basinhopping(func2d, [1.0, 1.0], minimizer_kwargs=minimizer_kwargs,
                     niter=10, callback=callback, seed=10)

        f_2 = []

        def callback2(x, f, accepted):
            f_2.append(f)

        basinhopping(func2d, [1.0, 1.0], minimizer_kwargs=minimizer_kwargs,
                     niter=10, callback=callback2, seed=10)
        assert_equal(np.array(f_1), np.array(f_2)) 
Example #4
Source File: v1_validation.py    From Attentive-Filtering-Network with MIT License
def best_eer(true_labels, predictions):

    def f_neg(threshold):
        # SciPy minimizes the objective, so return the EER directly
        return compute_eer(true_labels, predictions >= threshold)

    # Initialization of best threshold search
    thr_0 = [0.20] * 1 # binary class
    constraints = [(0.,1.)] * 1 # binary class
    def bounds(**kwargs):
        x = kwargs["x_new"]
        tmax = bool(np.all(x <= 1))
        tmin = bool(np.all(x >= 0))
        return tmax and tmin

    # Search using L-BFGS-B; the finite-difference step (eps) must be
    # large, otherwise the estimated gradient is zero
    minimizer_kwargs = {"method": "L-BFGS-B",
                        "bounds": constraints,
                        "options": {"eps": 0.05}}

    # Combine L-BFGS-B with basinhopping for a stochastic search with random steps
    logger.info("===> Searching optimal threshold for each label")
    start_time = timer()

    opt_output = basinhopping(f_neg, thr_0,
                              stepsize=0.1,
                              minimizer_kwargs=minimizer_kwargs,
                              niter=10,
                              accept_test=bounds)

    end_time = timer()
    logger.info("===> Optimal threshold for each label:\n{}".format(opt_output.x))
    logger.info("Threshold found in: %s seconds" % (end_time - start_time))

    score = opt_output.fun
    return score, opt_output.x 
Example #5
Source File: test__basinhopping.py    From GraphicDesignPatternByPython with MIT License
def test_jac(self):
        # test jacobian returned
        minimizer_kwargs = self.kwargs.copy()
        # BFGS returns a Jacobian
        minimizer_kwargs["method"] = "BFGS"

        res = basinhopping(func2d_easyderiv, [0.0, 0.0],
                           minimizer_kwargs=minimizer_kwargs, niter=self.niter,
                           disp=self.disp)

        assert_(hasattr(res.lowest_optimization_result, "jac"))

        # in this case, the jacobian is just [df/dx, df/dy]
        _, jacobian = func2d_easyderiv(res.x)
        assert_almost_equal(res.lowest_optimization_result.jac, jacobian,
                            self.tol) 
Example #6
Source File: test__basinhopping.py    From Computable with MIT License
def test_TypeError(self):
        # test that TypeError is raised on bad input
        i = 1
        # if take_step is passed, it must be callable
        self.assertRaises(TypeError, basinhopping, func2d, self.x0[i],
                          take_step=1)
        # if accept_test is passed, it must be callable
        self.assertRaises(TypeError, basinhopping, func2d, self.x0[i],
                          accept_test=1)
        # accept_test must return bool or string "force_accept"

        def bad_accept_test1(*args, **kwargs):
            return 1

        def bad_accept_test2(*args, **kwargs):
            return "not force_accept"
        self.assertRaises(ValueError, basinhopping, func2d, self.x0[i],
                          minimizer_kwargs=self.kwargs,
                          accept_test=bad_accept_test1)
        self.assertRaises(ValueError, basinhopping, func2d, self.x0[i],
                          minimizer_kwargs=self.kwargs,
                          accept_test=bad_accept_test2) 
Example #7
Source File: test_lml_optimizer.py    From CatLearn with GNU General Public License v3.0
def lml_opt(train_features, train_targets, test_features,
            kernel_list, regularization,
            global_opt=False, algomin='L-BFGS-B', eval_jac=True):
    """Test Gaussian process predictions."""
    # Test prediction routine with linear kernel.
    N, N_D = np.shape(train_features)
    regularization_bounds = (1e-3, None)
    kdict, bounds = prepare_kernels(kernel_list, regularization_bounds,
                                    eval_gradients, N_D)
    print(bounds)
    # Create a list of all hyperparameters.
    theta = kdicts2list(kdict, N_D=N_D)
    theta = np.append(theta, regularization)
    # Define fixed arguments for log_marginal_likelihood
    args = (np.array(train_features), np.array(train_targets),
            kdict, scale_optimizer, eval_gradients, None, eval_jac)
    # Optimize
    if not global_opt:
        popt = minimize(lml.log_marginal_likelihood, theta,
                        args=args,
                        method=algomin,
                        jac=eval_jac,
                        options={'disp': True},
                        bounds=bounds)
    else:
        minimizer_kwargs = {'method': algomin, 'args': args,
                            'bounds': bounds, 'jac': eval_jac}
        popt = basinhopping(lml.log_marginal_likelihood, theta, niter=10,
                            minimizer_kwargs=minimizer_kwargs, disp=True)
    return popt 
Example #8
Source File: test__basinhopping.py    From GraphicDesignPatternByPython with MIT License
def test_pass_takestep(self):
        # test that passing a custom takestep works
        # also test that the stepsize is being adjusted
        takestep = MyTakeStep1()
        initial_step_size = takestep.stepsize
        i = 1
        res = basinhopping(func2d, self.x0[i], minimizer_kwargs=self.kwargs,
                           niter=self.niter, disp=self.disp,
                           take_step=takestep)
        assert_almost_equal(res.x, self.sol[i], self.tol)
        assert_(takestep.been_called)
        # make sure that the built in adaptive step size has been used
        assert_(initial_step_size != takestep.stepsize) 
Example #9
Source File: test__basinhopping.py    From GraphicDesignPatternByPython with MIT License
def test_pass_simple_takestep(self):
        # test that passing a custom takestep without the stepsize attribute works
        takestep = myTakeStep2
        i = 1
        res = basinhopping(func2d_nograd, self.x0[i],
                           minimizer_kwargs=self.kwargs_nograd,
                           niter=self.niter, disp=self.disp,
                           take_step=takestep)
        assert_almost_equal(res.x, self.sol[i], self.tol) 
Example #10
Source File: test__basinhopping.py    From GraphicDesignPatternByPython with MIT License
def test_minimizer_fail(self):
        # test if a minimizer fails
        i = 1
        self.kwargs["options"] = dict(maxiter=0)
        self.niter = 10
        res = basinhopping(func2d, self.x0[i], minimizer_kwargs=self.kwargs,
                           niter=self.niter, disp=self.disp)
        # the number of failed minimizations should be the number of
        # iterations + 1
        assert_equal(res.nit + 1, res.minimization_failures) 
Example #11
Source File: test__basinhopping.py    From GraphicDesignPatternByPython with MIT License
def test_niter_zero(self):
        # gh5915, what happens if you call basinhopping with niter=0
        i = 0
        basinhopping(func1d, self.x0[i], minimizer_kwargs=self.kwargs,
                     niter=0, disp=self.disp) 
Example #12
Source File: optimizer.py    From vnpy_crypto with MIT License
def _fit_basinhopping(f, score, start_params, fargs, kwargs, disp=True,
                          maxiter=100, callback=None, retall=False,
                          full_output=True, hess=None):
    if 'basinhopping' not in vars(optimize):
        msg = 'basinhopping solver is not available, use e.g. bfgs instead!'
        raise ValueError(msg)

    from copy import copy
    kwargs = copy(kwargs)
    niter = kwargs.setdefault('niter', 100)
    niter_success = kwargs.setdefault('niter_success', None)
    T = kwargs.setdefault('T', 1.0)
    stepsize = kwargs.setdefault('stepsize', 0.5)
    interval = kwargs.setdefault('interval', 50)
    minimizer_kwargs = kwargs.get('minimizer', {})
    minimizer_kwargs['args'] = fargs
    minimizer_kwargs['jac'] = score
    method = minimizer_kwargs.get('method', None)
    if method and method != 'L-BFGS-B':  # L-BFGS-B doesn't take a Hessian
        minimizer_kwargs['hess'] = hess

    retvals = optimize.basinhopping(f, start_params,
                                    minimizer_kwargs=minimizer_kwargs,
                                    niter=niter, niter_success=niter_success,
                                    T=T, stepsize=stepsize, disp=disp,
                                    callback=callback, interval=interval)
    if full_output:
        xopt, fopt, niter, fcalls = map(lambda x: getattr(retvals, x),
                                        ['x', 'fun', 'nit', 'nfev'])
        converged = 'completed successfully' in retvals.message[0]
        retvals = {'fopt': fopt, 'iterations': niter,
                   'fcalls': fcalls, 'converged': converged}

    else:
        xopt = retvals.x
        retvals = None

    return xopt, retvals 
Example #13
Source File: test__basinhopping.py    From GraphicDesignPatternByPython with MIT License
def test_monotonic_basin_hopping(self):
        # test 1d minimizations with gradient and T=0
        i = 0
        res = basinhopping(func1d, self.x0[i], minimizer_kwargs=self.kwargs,
                           niter=self.niter, disp=self.disp, T=0)
        assert_almost_equal(res.x, self.sol[i], self.tol) 
Example #14
Source File: test__basinhopping.py    From GraphicDesignPatternByPython with MIT License
def test_boolean_return(self):
        # the return must be a bool, else an error will be raised in
        # basinhopping
        ret = self.met(f_new=0., f_old=1.)
        assert isinstance(ret, bool) 
Example #15
Source File: test__basinhopping.py    From GraphicDesignPatternByPython with MIT License
def test_pass_accept_test(self):
        # test passing a custom accept test
        # makes sure it's being used and ensures all the possible return values
        # are accepted.
        accept_test = MyAcceptTest()
        i = 1
        # there's no point in running it more than a few steps.
        basinhopping(func2d, self.x0[i], minimizer_kwargs=self.kwargs,
                     niter=10, disp=self.disp, accept_test=accept_test)
        assert_(accept_test.been_called) 
Example #16
Source File: api.py    From dl2 with MIT License
def basinhopping(constraint_solve, constraint_check, variables, bounds, args):
    x0, shapes, shapes_flat = vars_to_x(variables)
    
    def loss_fn(x):
        x_to_vars(x, variables, shapes_flat, shapes)
        return constraint_solve.to_diffsat(cache=True).loss(args)

    def local_optimization_step(fun, x0, *losargs, **loskwargs):
        loss_before = loss_fn(x0)
        inner_opt(constraint_solve, constraint_check, variables, bounds, args)
        r = spo.OptimizeResult()
        r.x, _, _ = vars_to_x(variables)
        loss_after = constraint_solve.to_diffsat(cache=True).loss(args)
        r.success = not (loss_before == loss_after and not constraint_check.to_diffsat(cache=True).satisfy(args))
        r.fun = loss_after
        return r

    def check_basinhopping(x, f, accept):
        if abs(f) <= 10 * args.eps_check:
            x_, _, _ = vars_to_x(variables)
            x_to_vars(x, variables, shapes_flat, shapes)
            if constraint_check.to_diffsat(cache=True).satisfy(args):
                return True
            else:
                x_to_vars(x_, variables, shapes_flat, shapes)
        return False
    
    minimizer_kwargs = {}
    minimizer_kwargs['method'] = local_optimization_step

    satisfied = constraint_check.to_diffsat(cache=True).satisfy(args)
    if satisfied:
        return True
    spo.basinhopping(loss_fn, x0, niter=1000, minimizer_kwargs=minimizer_kwargs, callback=check_basinhopping,
                     T=args.basinhopping_T, stepsize=args.basinhopping_stepsize)
    return constraint_check.to_diffsat(cache=True).satisfy(args) 
Example #17
Source File: api.py    From dl2 with MIT License
def solve(constraint, args, return_values=None):
    def solve_(constraint, args, return_values=None):
        t0 = time.time()
        if constraint is not None:
            constraint_s, variables, bounds = simplify(constraint, args)
            if args.use_basinhopping:
                satisfied = basinhopping(constraint_s, constraint, variables, bounds, args)
            else:
                satisfied = inner_opt(constraint_s, constraint, variables, bounds, args)
        else:
            satisfied = True

        if return_values is None:
            if constraint is not None:
                variables = list(set(constraint.get_variables()))
                ret = dict([(v.name, v.tensor.detach().cpu().numpy()) for v in variables])
            else:
                ret = dict()
        else:
            ret = [(str(r), r.to_diffsat(cache=True).detach().cpu().numpy()) for r in return_values]
            if len(ret) == 1:
                ret = ret[0][1]
            else:
                ret = dict(ret)
        t1 = time.time()
        return satisfied, ret, t1 - t0

    def timeout(signum, frame):
        raise TimeoutException()

    signal.signal(signal.SIGALRM, timeout)
    signal.alarm(args.timeout)
    try:
        solved, results, t = solve_(constraint, args, return_values=return_values)
    except TimeoutException:
        solved, results, t = False, None, args.timeout
    signal.alarm(0)  # cancel alarms
    torch.cuda.empty_cache()
    return solved, results, t 
Example #18
Source File: optimizer.py    From Splunking-Crime with GNU Affero General Public License v3.0
def _fit_basinhopping(f, score, start_params, fargs, kwargs, disp=True,
                          maxiter=100, callback=None, retall=False,
                          full_output=True, hess=None):
    if 'basinhopping' not in vars(optimize):
        msg = 'basinhopping solver is not available, use e.g. bfgs instead!'
        raise ValueError(msg)

    from copy import copy
    kwargs = copy(kwargs)
    niter = kwargs.setdefault('niter', 100)
    niter_success = kwargs.setdefault('niter_success', None)
    T = kwargs.setdefault('T', 1.0)
    stepsize = kwargs.setdefault('stepsize', 0.5)
    interval = kwargs.setdefault('interval', 50)
    minimizer_kwargs = kwargs.get('minimizer', {})
    minimizer_kwargs['args'] = fargs
    minimizer_kwargs['jac'] = score
    method = minimizer_kwargs.get('method', None)
    if method and method != 'L-BFGS-B':  # L-BFGS-B doesn't take a Hessian
        minimizer_kwargs['hess'] = hess

    retvals = optimize.basinhopping(f, start_params,
                                    minimizer_kwargs=minimizer_kwargs,
                                    niter=niter, niter_success=niter_success,
                                    T=T, stepsize=stepsize, disp=disp,
                                    callback=callback, interval=interval)
    if full_output:
        xopt, fopt, niter, fcalls = map(lambda x: getattr(retvals, x),
                                        ['x', 'fun', 'nit', 'nfev'])
        converged = 'completed successfully' in retvals.message[0]
        retvals = {'fopt': fopt, 'iterations': niter,
                   'fcalls': fcalls, 'converged': converged}

    else:
        xopt = retvals.x
        retvals = None

    return xopt, retvals 
Example #19
Source File: tests.py    From dynamo-release with BSD 3-Clause "New" or "Revised" License
def test_Wang_LAP():
    """Test the least action path method from Jin Wang and colleagues (http://www.pnas.org/cgi/doi/10.1073/pnas.1017017108)

	Returns
	-------

	"""
    x1_end = 1
    x2_end = 0
    x2_init = 1.5
    x1_init = 1.5
    N = 20

    x1_input = np.arange(
        x1_init, x1_end + (x1_end - x1_init) / N, (x1_end - x1_init) / N
    )
    x2_input = np.arange(
        x2_init, x2_end + (x2_end - x2_init) / N, (x2_end - x2_init) / N
    )
    X_input = np.vstack((x1_input, x2_input))

    dyn.tl.least_action(X_input, F=F, D=0.1, N=20, lamada_=1)
    res = optimize.basinhopping(
        dyn.tl.least_action, x0=X_input, minimizer_kwargs={"args": (2, F, 0.1, 20, 1)}
    )
    res 
Example #20
Source File: Wang.py    From dynamo-release with BSD 3-Clause "New" or "Revised" License
def Wang_LAP(F, n_points, point_start, point_end, D=0.1, lambda_=1):
    """Calculating least action path based methods from Jin Wang and colleagues (http://www.pnas.org/cgi/doi/10.1073/pnas.1017017108)

    Parameters
    ----------
        F: `Function`
            The reconstructed vector field function
        n_points: 'int'
            The number of points along the least action path.
        point_start: 'np.ndarray'
            The matrix for storing the coordinates (gene expression configuration) of the start point (initial cell state).
        point_end: 'np.ndarray'
            The matrix for storing the coordinates (gene expression configuration) of the end point (terminal cell state).
        D: `float`
            The diffusion constant. Note that this can be a space-dependent matrix.
        lambda_: `float`
            Regularization parameter.

    Returns
    -------
        The least action path and the action value of the inferred path.
    """
    initpath = point_start.dot(np.ones((1, n_points + 1))) + (
        point_end - point_start
    ).dot(np.linspace(0, 1, n_points + 1, endpoint=True).reshape(1, -1))

    dim, N = initpath.shape
    # update this optimization method
    res = optimize.basinhopping(
        Wang_action, x0=initpath, minimizer_kwargs={"args": (F, D, dim, N, lambda_)}
    )

    return res 
Example #21
Source File: test_minimize.py    From symfit with GNU General Public License v2.0
def test_basinhopping():
    def func(x):
        return np.cos(14.5 * x - 0.3) + (x + 0.2) * x
    x0 = [1.]
    np.random.seed(555)
    res = basinhopping(func, x0, minimizer_kwargs={"method": "BFGS"}, niter=200)
    np.random.seed(555)
    x, = parameters('x')
    fit = BasinHopping(func, [x], local_minimizer=BFGS)
    fit_result = fit.execute(niter=200)
    # fit_result = fit.execute(minimizer_kwargs={"method": "BFGS"}, niter=200)

    assert res.x == fit_result.value(x)
    assert res.fun == fit_result.objective_value 
Example #22
Source File: numeric_solver.py    From geosolver with Apache License 2.0
def find_assignment(variable_handler, atoms, max_num_resets, tol, verbose=True):
    init = variable_handler.dict_to_vector()

    def func(vector):
        return sum(evaluate(atom, variable_handler.vector_to_dict(vector)).norm for atom in atoms)

    xs = []
    fs = []
    options = {'ftol': tol**2}
    minimizer_kwargs = {"method": "SLSQP", "options": options}
    for i in range(max_num_resets):
        result = basinhopping(func, init, minimizer_kwargs=minimizer_kwargs)
        if verbose:
            print("iteration %d:" % (i+1))
            print(result)
        xs.append(result.x)
        fs.append(result.fun)
        if result.fun < tol:
            break
        init = np.random.rand(len(init))

    min_idx = min(enumerate(fs), key=lambda pair: pair[1])[0]
    assignment = variable_handler.vector_to_dict(xs[min_idx])
    norm = fs[min_idx]
    return assignment, norm 
Example #23
Source File: test__basinhopping.py    From GraphicDesignPatternByPython with MIT License
def test_all_minimizers(self):
        # test 2d minimizations with gradient.  Nelder-Mead, Powell and
        # COBYLA don't accept jac=True, so they aren't included here.
        i = 1
        methods = ['CG', 'BFGS', 'Newton-CG', 'L-BFGS-B', 'TNC', 'SLSQP']
        minimizer_kwargs = copy.copy(self.kwargs)
        for method in methods:
            minimizer_kwargs["method"] = method
            res = basinhopping(func2d, self.x0[i],
                               minimizer_kwargs=minimizer_kwargs,
                               niter=self.niter, disp=self.disp)
            assert_almost_equal(res.x, self.sol[i], self.tol) 
Example #24
Source File: test__basinhopping.py    From Computable with MIT License
def test_1d_grad(self):
        # test 1d minimizations with gradient
        i = 0
        res = basinhopping(func1d, self.x0[i], minimizer_kwargs=self.kwargs,
                           niter=self.niter, disp=self.disp)
        assert_almost_equal(res.x, self.sol[i], self.tol) 
Example #25
Source File: test__basinhopping.py    From Computable with MIT License
def test_2d(self):
        # test 2d minimizations with gradient
        i = 1
        res = basinhopping(func2d, self.x0[i], minimizer_kwargs=self.kwargs,
                           niter=self.niter, disp=self.disp)
        assert_almost_equal(res.x, self.sol[i], self.tol)
        self.assertTrue(res.nfev > 0) 
Example #26
Source File: test__basinhopping.py    From Computable with MIT License
def test_njev(self):
        # test njev is returned correctly
        i = 1
        minimizer_kwargs = self.kwargs.copy()
        # L-BFGS-B doesn't use njev, but BFGS does
        minimizer_kwargs["method"] = "BFGS"
        res = basinhopping(func2d, self.x0[i],
                           minimizer_kwargs=minimizer_kwargs, niter=self.niter,
                           disp=self.disp)
        self.assertTrue(res.nfev > 0)
        self.assertEqual(res.nfev, res.njev) 
Example #27
Source File: test__basinhopping.py    From Computable with MIT License
def test_2d_nograd(self):
        # test 2d minimizations without gradient
        i = 1
        res = basinhopping(func2d_nograd, self.x0[i],
                           minimizer_kwargs=self.kwargs_nograd,
                           niter=self.niter, disp=self.disp)
        assert_almost_equal(res.x, self.sol[i], self.tol) 
Example #28
Source File: test__basinhopping.py    From Computable with MIT License
def test_pass_takestep(self):
        # test that passing a custom takestep works
        # also test that the stepsize is being adjusted
        takestep = MyTakeStep1()
        initial_step_size = takestep.stepsize
        i = 1
        res = basinhopping(func2d, self.x0[i], minimizer_kwargs=self.kwargs,
                           niter=self.niter, disp=self.disp,
                           take_step=takestep)
        assert_almost_equal(res.x, self.sol[i], self.tol)
        assert_(takestep.been_called)
        # make sure that the built in adaptive step size has been used
        assert_(initial_step_size != takestep.stepsize) 
Example #29
Source File: test__basinhopping.py    From Computable with MIT License
def test_pass_simple_takestep(self):
        # test that passing a custom takestep without the stepsize attribute works
        takestep = myTakeStep2
        i = 1
        res = basinhopping(func2d_nograd, self.x0[i],
                           minimizer_kwargs=self.kwargs_nograd,
                           niter=self.niter, disp=self.disp,
                           take_step=takestep)
        assert_almost_equal(res.x, self.sol[i], self.tol) 
Example #30
Source File: test__basinhopping.py    From Computable with MIT License
def test_pass_accept_test(self):
        # test passing a custom accept test
        # makes sure it's being used and ensures all the possible return values
        # are accepted.
        accept_test = MyAcceptTest()
        i = 1
        # there's no point in running it more than a few steps.
        res = basinhopping(func2d, self.x0[i], minimizer_kwargs=self.kwargs,
                           niter=10, disp=self.disp, accept_test=accept_test)
        assert_(accept_test.been_called)