Python scipy.optimize.rosen() Examples

The following are 28 code examples of scipy.optimize.rosen(). You may also want to check out the other functions and classes available in the scipy.optimize module.
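As a quick orientation before the project examples, here is a minimal standalone sketch (not taken from any of the projects below) of how scipy.optimize.rosen() and its companions rosen_der() and rosen_hess() are typically evaluated and handed to minimize():

import numpy as np
from scipy.optimize import minimize, rosen, rosen_der, rosen_hess

# Evaluate the Rosenbrock function, its gradient and its Hessian at a point.
x = np.array([1.3, 0.7, 0.8, 1.9, 1.2])
print(rosen(x))        # scalar function value
print(rosen_der(x))    # gradient, same shape as x
print(rosen_hess(x))   # (n, n) Hessian matrix

# The global minimum lies at x = [1, 1, ..., 1], where rosen(x) == 0.
res = minimize(rosen, x, jac=rosen_der, hess=rosen_hess, method='Newton-CG')
print(res.x, res.fun)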
Example #1
Source File: test_optimize.py    From GraphicDesignPatternByPython with MIT License
def test_l_bfgs_b_maxiter(self):
        # gh7854
        # Ensure that no more than maxiter iterations are ever run.
        class Callback(object):
            def __init__(self):
                self.nit = 0
                self.fun = None
                self.x = None

            def __call__(self, x):
                self.x = x
                self.fun = optimize.rosen(x)
                self.nit += 1

        c = Callback()
        res = optimize.minimize(optimize.rosen, [0., 0.], method='l-bfgs-b',
                                callback=c, options={'maxiter': 5})

        assert_equal(res.nit, 5)
        assert_almost_equal(res.x, c.x)
        assert_almost_equal(res.fun, c.fun)
        assert_equal(res.status, 1)
        assert_(res.success is False)
        assert_equal(res.message.decode(), 'STOP: TOTAL NO. of ITERATIONS REACHED LIMIT') 
Example #2
Source File: test_optimize.py    From GraphicDesignPatternByPython with MIT License
def test_minimize_l_bfgs_b_maxfun_interruption(self):
        # gh-6162
        f = optimize.rosen
        g = optimize.rosen_der
        values = []
        x0 = np.ones(7) * 1000

        def objfun(x):
            value = f(x)
            values.append(value)
            return value

        # Look for an interesting test case.
        # Request a maxfun that stops at a particularly bad function
        # evaluation somewhere between 100 and 300 evaluations.
        low, medium, high = 30, 100, 300
        optimize.fmin_l_bfgs_b(objfun, x0, fprime=g, maxfun=high)
        v, k = max((y, i) for i, y in enumerate(values[medium:]))
        maxfun = medium + k
        # If the minimization strategy is reasonable,
        # the minimize() result should not be worse than the best
        # of the first 30 function evaluations.
        target = min(values[:low])
        xmin, fmin, d = optimize.fmin_l_bfgs_b(f, x0, fprime=g, maxfun=maxfun)
        assert_array_less(fmin, target) 
Example #3
Source File: test_sample.py    From pyPESTO with BSD 3-Clause "New" or "Revised" License
def rosenbrock_problem():
    """Problem based on rosenbrock objective.

    Features
    --------
    * 2-dim
    * has fixed parameters
    """
    objective = pypesto.Objective(fun=so.rosen)

    dim_full = 2
    lb = -5 * np.ones((dim_full, 1))
    ub = 5 * np.ones((dim_full, 1))

    problem = pypesto.Problem(
            objective=objective, lb=lb, ub=ub,
            x_fixed_indices=[1], x_fixed_vals=[2])
    return problem 
Example #4
Source File: test__differential_evolution.py    From GraphicDesignPatternByPython with MIT License
def test_bounds_checking(self):
        # test that the bounds checking works
        func = rosen
        bounds = [(-3, None)]
        assert_raises(ValueError,
                          differential_evolution,
                          func,
                          bounds)
        bounds = [(-3)]
        assert_raises(ValueError,
                          differential_evolution,
                          func,
                          bounds)
        bounds = [(-3, 3), (3, 4, 5)]
        assert_raises(ValueError,
                          differential_evolution,
                          func,
                          bounds) 
Example #5
Source File: test__differential_evolution.py    From GraphicDesignPatternByPython with MIT License
def test_iteration(self):
        # test that DifferentialEvolutionSolver is iterable
        # if popsize is 3 then the overall generation has size (6,)
        solver = DifferentialEvolutionSolver(rosen, self.bounds, popsize=3,
                                             maxfun=12)
        x, fun = next(solver)
        assert_equal(np.size(x, 0), 2)

        # 6 nfev are required for initial calculation of energies, 6 nfev are
        # required for the evolution of the 6 population members.
        assert_equal(solver._nfev, 12)

        # the next generation should halt because it exceeds maxfun
        assert_raises(StopIteration, next, solver)

        # check a proper minimisation can be done by an iterable solver
        solver = DifferentialEvolutionSolver(rosen, self.bounds)
        for i, soln in enumerate(solver):
            x_current, fun_current = soln
            # need to have this otherwise the solver would never stop.
            if i == 1000:
                break

        assert_almost_equal(fun_current, 0) 
Example #6
Source File: devo_numpy.py    From compas with MIT License
def f(u, *args):
        return rosen(u.ravel()) 
Example #7
Source File: go_funcs_R.py    From pymoo with Apache License 2.0
def fun(self, x, *args):
        self.nfev += 1

        return rosen(x) 
Example #8
Source File: descent_numpy.py    From compas with MIT License
def f(u, *args):
        return rosen(u.ravel()) 
Example #9
Source File: test_visualize.py    From pyPESTO with BSD 3-Clause "New" or "Revised" License
def create_problem():
    # define a pypesto objective
    objective = pypesto.Objective(fun=so.rosen,
                                  grad=so.rosen_der,
                                  hess=so.rosen_hess)

    # define a pypesto problem
    (lb, ub) = create_bounds()
    problem = pypesto.Problem(objective=objective, lb=lb, ub=ub)

    return problem 
Example #10
Source File: test_objective.py    From pyPESTO with BSD 3-Clause "New" or "Revised" License
def rosen_for_sensi(max_sensi_order, integrated=False, x=None):
    """
    Rosenbrock function from scipy.optimize.
    """
    if x is None:
        x = [0, 1]

    return obj_for_sensi(so.rosen,
                         so.rosen_der,
                         so.rosen_hess,
                         max_sensi_order, integrated, x) 
Example #11
Source File: test_optimization.py    From chumpy with MIT License
def compute_r(self):
        
        result = np.array((rosen(self.x.r) ))
        
        return result 
Example #12
Source File: test_short.py    From psopy with BSD 3-Clause "New" or "Revised" License
def test_unconstrained(self):
        """Test against the Rosenbrock function."""

        x0 = np.random.uniform(0, 2, (1000, 5))
        sol = np.array([1., 1., 1., 1., 1.])
        res = minimize(rosen, x0)
        converged = res.success
        assert converged, res.message
        np.testing.assert_array_almost_equal(sol, res.x, 3) 
Example #13
Source File: test_nlopt_optimizers.py    From qiskit-aqua with Apache License 2.0
def _optimize(self, optimizer):
        x_0 = [1.3, 0.7, 0.8, 1.9, 1.2]
        bounds = [(-6, 6)] * len(x_0)
        res = optimizer.optimize(len(x_0), rosen, initial_point=x_0, variable_bounds=bounds)
        np.testing.assert_array_almost_equal(res[0], [1.0] * len(x_0), decimal=2)
        return res

    # ESCH and ISRES do not do well with rosen 
Example #14
Source File: test_optimizers.py    From qiskit-aqua with Apache License 2.0
def test_gsls(self):
        """ gsls test """
        optimizer = GSLS(sample_size_factor=40, sampling_radius=1.0e-12, maxiter=10000,
                         max_eval=10000, min_step_size=1.0e-12)
        x_0 = [1.3, 0.7, 0.8, 1.9, 1.2]
        _, x_value, n_evals = optimizer.optimize(len(x_0), rosen, initial_point=x_0)

        # Ensure value is near-optimal
        self.assertLessEqual(x_value, 0.01)
        self.assertLessEqual(n_evals, 10000) 
Example #15
Source File: test_optimizers.py    From qiskit-aqua with Apache License 2.0
def _optimize(self, optimizer):
        x_0 = [1.3, 0.7, 0.8, 1.9, 1.2]
        res = optimizer.optimize(len(x_0), rosen, initial_point=x_0)
        np.testing.assert_array_almost_equal(res[0], [1.0] * len(x_0), decimal=2)
        return res 
Example #16
Source File: test__differential_evolution.py    From GraphicDesignPatternByPython with MIT License
def test_maxiter_none_GH5731(self):
        # Prior to 0.17 the default for maxiter and maxfun was None;
        # the numerical defaults are now 1000 and np.inf. However, some scripts
        # still supply None for both of these, which would raise a TypeError
        # in the solve method.
        solver = DifferentialEvolutionSolver(rosen, self.bounds, maxiter=None,
                                             maxfun=None)
        solver.solve() 
Example #17
Source File: test__differential_evolution.py    From GraphicDesignPatternByPython with MIT License
def test_calculate_population_energies(self):
        # if popsize is 3 then the overall generation has size (6,)
        solver = DifferentialEvolutionSolver(rosen, self.bounds, popsize=3)
        solver._calculate_population_energies()

        assert_equal(np.argmin(solver.population_energies), 0)

        # initial calculation of the energies should require 6 nfev.
        assert_equal(solver._nfev, 6) 
Example #18
Source File: test__differential_evolution.py    From GraphicDesignPatternByPython with MIT License
def test_gh_4511_regression(self):
        # This modification of the differential evolution docstring example
        # uses a custom popsize that had triggered an off-by-one error.
        # Because we do not care about solving the optimization problem in
        # this test, we use maxiter=1 to reduce the testing time.
        bounds = [(-5, 5), (-5, 5)]
        result = differential_evolution(rosen, bounds, popsize=1815, maxiter=1) 
Example #19
Source File: test__differential_evolution.py    From GraphicDesignPatternByPython with MIT License
def test_exp_runs(self):
        # test whether exponential mutation loop runs
        solver = DifferentialEvolutionSolver(rosen,
                                             self.bounds,
                                             strategy='best1exp',
                                             maxiter=1)

        solver.solve() 
Example #20
Source File: test__differential_evolution.py    From GraphicDesignPatternByPython with MIT License
def test_maxfun_stops_solve(self):
        # test that if the maximum number of function evaluations is exceeded
        # during initialisation the solver stops
        solver = DifferentialEvolutionSolver(rosen, self.bounds, maxfun=1,
                                             polish=False)
        result = solver.solve()

        assert_equal(result.nfev, 2)
        assert_equal(result.success, False)
        assert_equal(result.message,
                     'Maximum number of function evaluations has '
                     'been exceeded.')

        # test that if the maximum number of function evaluations is exceeded
        # during the actual minimisation, then the solver stops.
        # Have to turn polishing off, as this will still occur even if maxfun
        # is reached. For popsize=5 and len(bounds)=2, then there are only 10
        # function evaluations during initialisation.
        solver = DifferentialEvolutionSolver(rosen,
                                             self.bounds,
                                             popsize=5,
                                             polish=False,
                                             maxfun=40)
        result = solver.solve()

        assert_equal(result.nfev, 41)
        assert_equal(result.success, False)
        assert_equal(result.message,
                     'Maximum number of function evaluations has '
                     'been exceeded.')
Example #21
Source File: test__differential_evolution.py    From GraphicDesignPatternByPython with MIT License
def test_init_with_invalid_strategy(self):
        # test that passing an invalid strategy raises ValueError
        func = rosen
        bounds = [(-3, 3)]
        assert_raises(ValueError,
                          differential_evolution,
                          func,
                          bounds,
                          strategy='abc') 
Example #22
Source File: test__differential_evolution.py    From GraphicDesignPatternByPython with MIT License
def test_callback_terminates(self):
        # test that if the callback returns True, then the minimization halts
        bounds = [(0, 2), (0, 2)]

        def callback(param, convergence=0.):
            return True

        result = differential_evolution(rosen, bounds, callback=callback)

        assert_string_equal(result.message,
                                'callback function requested stop early '
                                'by returning True') 
Example #23
Source File: test__differential_evolution.py    From GraphicDesignPatternByPython with MIT License
def test_invalid_mutation_values_arent_accepted(self):
        func = rosen
        mutation = (0.5, 3)
        assert_raises(ValueError,
                          DifferentialEvolutionSolver,
                          func,
                          self.bounds,
                          mutation=mutation)

        mutation = (-1, 1)
        assert_raises(ValueError,
                          DifferentialEvolutionSolver,
                          func,
                          self.bounds,
                          mutation=mutation)

        mutation = (0.1, np.nan)
        assert_raises(ValueError,
                          DifferentialEvolutionSolver,
                          func,
                          self.bounds,
                          mutation=mutation)

        mutation = 0.5
        solver = DifferentialEvolutionSolver(func,
                                             self.bounds,
                                             mutation=mutation)
        assert_equal(0.5, solver.scale)
        assert_equal(None, solver.dither) 
Example #24
Source File: test_optimize.py    From GraphicDesignPatternByPython with MIT License
def setup_method(self):
        self.x0 = [5, 5]
        self.func = optimize.rosen
        self.jac = optimize.rosen_der
        self.hess = optimize.rosen_hess
        self.hessp = optimize.rosen_hess_prod
        self.bounds = [(0., 10.), (0., 10.)] 
Example #25
Source File: test_optimize.py    From GraphicDesignPatternByPython with MIT License
def test_custom(self):
        # This function comes from the documentation example.
        def custmin(fun, x0, args=(), maxfev=None, stepsize=0.1,
                maxiter=100, callback=None, **options):
            bestx = x0
            besty = fun(x0)
            funcalls = 1
            niter = 0
            improved = True
            stop = False

            while improved and not stop and niter < maxiter:
                improved = False
                niter += 1
                for dim in range(np.size(x0)):
                    for s in [bestx[dim] - stepsize, bestx[dim] + stepsize]:
                        testx = np.copy(bestx)
                        testx[dim] = s
                        testy = fun(testx, *args)
                        funcalls += 1
                        if testy < besty:
                            besty = testy
                            bestx = testx
                            improved = True
                    if callback is not None:
                        callback(bestx)
                    if maxfev is not None and funcalls >= maxfev:
                        stop = True
                        break

            return optimize.OptimizeResult(fun=besty, x=bestx, nit=niter,
                                           nfev=funcalls, success=(niter > 1))

        x0 = [1.35, 0.9, 0.8, 1.1, 1.2]
        res = optimize.minimize(optimize.rosen, x0, method=custmin,
                                options=dict(stepsize=0.05))
        assert_allclose(res.x, 1.0, rtol=1e-4, atol=1e-4) 
Example #26
Source File: test_optimize.py    From GraphicDesignPatternByPython with MIT License
def test_minimize_l_bfgs_maxls(self):
        # check that the maxls is passed down to the Fortran routine
        sol = optimize.minimize(optimize.rosen, np.array([-1.2,1.0]),
                                method='L-BFGS-B', jac=optimize.rosen_der,
                                options={'disp': False, 'maxls': 1})
        assert_(not sol.success) 
Example #27
Source File: test__differential_evolution.py    From GraphicDesignPatternByPython with MIT License
def test_population_initiation(self):
        # test the different modes of population initiation

        # init must be either 'latinhypercube' or 'random'
        # raising ValueError if something else is passed in
        assert_raises(ValueError,
                      DifferentialEvolutionSolver,
                      *(rosen, self.bounds),
                      **{'init': 'rubbish'})

        solver = DifferentialEvolutionSolver(rosen, self.bounds)

        # check that population initiation:
        # 1) resets _nfev to 0
        # 2) all population energies are np.inf
        solver.init_population_random()
        assert_equal(solver._nfev, 0)
        assert_(np.all(np.isinf(solver.population_energies)))

        solver.init_population_lhs()
        assert_equal(solver._nfev, 0)
        assert_(np.all(np.isinf(solver.population_energies)))

        # we should be able to initialise with our own array
        population = np.linspace(-1, 3, 10).reshape(5, 2)
        solver = DifferentialEvolutionSolver(rosen, self.bounds,
                                             init=population,
                                             strategy='best2bin',
                                             atol=0.01, seed=1, popsize=5)

        assert_equal(solver._nfev, 0)
        assert_(np.all(np.isinf(solver.population_energies)))
        assert_(solver.num_population_members == 5)
        assert_(solver.population_shape == (5, 2))

        # check that the population was initialised correctly
        unscaled_population = np.clip(solver._unscale_parameters(population),
                                      0, 1)
        assert_almost_equal(solver.population[:5], unscaled_population)

        # population values need to be clipped to bounds
        assert_almost_equal(np.min(solver.population[:5]), 0)
        assert_almost_equal(np.max(solver.population[:5]), 1)

        # shouldn't be able to initialise with an array if it's the wrong shape
        # this would have too many parameters
        population = np.linspace(-1, 3, 15).reshape(5, 3)
        assert_raises(ValueError,
                      DifferentialEvolutionSolver,
                      *(rosen, self.bounds),
                      **{'init': population}) 
Example #28
Source File: test_optimize.py    From GraphicDesignPatternByPython with MIT License
def test_minimize_callback_copies_array(self, method):
        # Check that arrays passed to callbacks are not modified
        # in place by the optimizer afterward

        if method in ('fmin_tnc', 'fmin_l_bfgs_b'):
            func = lambda x: (optimize.rosen(x), optimize.rosen_der(x))
        else:
            func = optimize.rosen
            jac = optimize.rosen_der
            hess = optimize.rosen_hess

        x0 = np.zeros(10)

        # Set options
        kwargs = {}
        if method.startswith('fmin'):
            routine = getattr(optimize, method)
            if method == 'fmin_slsqp':
                kwargs['iter'] = 5
            elif method == 'fmin_tnc':
                kwargs['maxfun'] = 100
            else:
                kwargs['maxiter'] = 5
        else:
            def routine(*a, **kw):
                kw['method'] = method
                return optimize.minimize(*a, **kw)

            if method == 'TNC':
                kwargs['options'] = dict(maxiter=100)
            else:
                kwargs['options'] = dict(maxiter=5)

        if method in ('fmin_ncg',):
            kwargs['fprime'] = jac
        elif method in ('Newton-CG',):
            kwargs['jac'] = jac
        elif method in ('trust-krylov', 'trust-exact', 'trust-ncg', 'dogleg',
                        'trust-constr'):
            kwargs['jac'] = jac
            kwargs['hess'] = hess

        # Run with callback
        results = []

        def callback(x, *args, **kwargs):
            results.append((x, np.copy(x)))

        sol = routine(func, x0, callback=callback, **kwargs)

        # Check returned arrays coincide with their copies and have no memory overlap
        assert_(len(results) > 2)
        assert_(all(np.all(x == y) for x, y in results))
        assert_(not any(np.may_share_memory(x[0], y[0]) for x, y in itertools.combinations(results, 2)))