Python scipy.optimize.brent() Examples
The following are 13 code examples of scipy.optimize.brent(). Each example names the original project and source file it was taken from. You may also want to check out the other available functions and classes of the scipy.optimize module.
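Before the project examples, here is a minimal, self-contained sketch of the basic call (the objective f and the bracket below are made up for illustration): brent() minimizes a scalar function of one variable, optionally starting from a bracketing interval.

from scipy import optimize

def f(x):
    # simple convex objective with its minimum at x = 2
    return (x - 2.0) ** 2 + 1.0

xmin = optimize.brent(f, brack=(0, 4))  # (xa, xb): starting points for the bracket search
print(xmin)  # ~2.0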
Example #1
Source File: transform.py From skutil with BSD 3-Clause "New" or "Revised" License
def _yj_normmax(x, brack=(-2, 2)):
    """Compute optimal YJ transform parameter for input data.

    Parameters
    ----------
    x : array_like
        Input array.

    brack : 2-tuple
        The starting interval for a downhill bracket search.
    """
    # Use MLE to compute the optimal YJ parameter
    def _mle_opt(i, brck):
        def _eval_mle(lmb, data):
            # Function to minimize
            return -_yj_llf(data, lmb)

        return optimize.brent(_eval_mle, brack=brck, args=(i,))

    return _mle_opt(x, brack)  # _mle(x, brack)
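The helper _yj_llf is internal to skutil and not shown in this snippet. As a rough, hypothetical stand-in, it computes the Yeo-Johnson log-likelihood along the lines of the scikit-learn version in Example #3 below:

import numpy as np

def _yj_llf_sketch(data, lmbda):
    # Hypothetical stand-in for skutil's _yj_llf: Yeo-Johnson
    # log-likelihood, shown here only for non-negative data and
    # lmbda != 0 (the full transform has four cases).
    x = np.asarray(data, dtype=float)
    x_trans = ((x + 1.0) ** lmbda - 1.0) / lmbda
    n = x.shape[0]
    loglike = -n / 2.0 * np.log(x_trans.var())
    loglike += (lmbda - 1.0) * (np.sign(x) * np.log1p(np.abs(x))).sum()
    return loglike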
Example #2
Source File: morestats.py From Computable with MIT License
def boxcox_normmax(x, brack=(-1.0, 1.0)):
    N = len(x)
    # compute uniform median statistics
    Ui = zeros(N) * 1.0
    Ui[-1] = 0.5 ** (1.0 / N)
    Ui[0] = 1 - Ui[-1]
    i = arange(2, N)
    Ui[1:-1] = (i - 0.3175) / (N + 0.365)
    xvals = distributions.norm.ppf(Ui)

    # this function computes the x-axis values of the probability plot
    # and computes a linear regression (including the correlation)
    # and returns 1-r so that a minimization function maximizes the
    # correlation
    def tempfunc(lmbda, xvals, samps):
        y = boxcox(samps, lmbda)
        yvals = sort(y)
        r, prob = stats.pearsonr(xvals, yvals)
        return 1 - r

    return optimize.brent(tempfunc, brack=brack, args=(xvals, x))
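This old vendored copy of scipy is essentially what now ships as scipy.stats.boxcox_normmax with method='pearsonr'; a hedged usage sketch of the public function, with made-up sample data:

import numpy as np
from scipy import stats

x = np.random.RandomState(0).lognormal(size=200)  # positively skewed sample
lmbda = stats.boxcox_normmax(x, method='pearsonr')
print(lmbda)  # lambda that maximizes the probability-plot correlation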
Example #3
Source File: data.py From Mastering-Elasticsearch-7.0 with MIT License
def _yeo_johnson_optimize(self, x):
    """Find and return optimal lambda parameter of the Yeo-Johnson
    transform by MLE, for observed data x.

    Like for Box-Cox, MLE is done via the brent optimizer.
    """

    def _neg_log_likelihood(lmbda):
        """Return the negative log likelihood of the observed data x as a
        function of lambda."""
        x_trans = self._yeo_johnson_transform(x, lmbda)
        n_samples = x.shape[0]

        loglike = -n_samples / 2 * np.log(x_trans.var())
        loglike += (lmbda - 1) * (np.sign(x) * np.log1p(np.abs(x))).sum()

        return -loglike

    # the computation of lambda is influenced by NaNs so we need to
    # get rid of them
    x = x[~np.isnan(x)]
    # choosing bracket -2, 2 like for boxcox
    return optimize.brent(_neg_log_likelihood, brack=(-2, 2))
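This private method comes from scikit-learn's PowerTransformer (vendored in the Mastering-Elasticsearch-7.0 repository) and is normally reached through the public API; a hedged usage sketch with made-up data:

import numpy as np
from sklearn.preprocessing import PowerTransformer

data = np.random.RandomState(0).exponential(size=(100, 1))  # skewed sample
pt = PowerTransformer(method='yeo-johnson').fit(data)
print(pt.lambdas_)  # lambda found per column by the brent-based MLE above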
Example #4
Source File: skewness.py From skoot with MIT License
def _yj_est_lam(y, brack, dtype=np.float32):
    y = np.asarray(y).astype(dtype)

    # Use MLE to compute the optimal YJ parameter
    def _mle_opt(i, brck):
        def _eval_mle(lmb, data):
            # Function to minimize
            return -_yj_llf(data, lmb)

        # Suppress the invalid scalar warnings we might get in the
        # optimization routine.
        @suppress
        def brent_optimize():
            return optimize.brent(_eval_mle, brack=brck, args=(i,))

        # suppressed version:
        return brent_optimize()

    return _mle_opt(y, brack)  # _mle(x, brack)
Example #5
Source File: property_package.py From thermo with MIT License
def P_dew_at_T(self, T, zs, Psats=None):
    Psats = self._Psats(Psats, T)
    Pmax = self.P_bubble_at_T(T, zs, Psats)
    diff = 1E-7
    # EOSs do not solve at very low pressure
    if self.use_phis:
        Pmin = max(Pmax*diff, 1)
    else:
        Pmin = Pmax*diff
    P_dew = brenth(self._T_VF_err, Pmin, Pmax, args=(T, zs, Psats, Pmax, 1))
    self.__TVF_solve_cache = None
    return P_dew
#        try:
#            return brent(self._dew_P_UNIFAC_err, args=(T, zs, Psats, Pmax), brack=(Pmax*diff, Pmax*(1-diff), Pmax))
#        except:
#            return golden(self._dew_P_UNIFAC_err, args=(T, zs, Psats, Pmax), brack=(Pmax, Pmax*(1-diff)))
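Note that brenth here is scipy.optimize.brenth, a root finder; the brent minimizer only appears in the commented-out fallback. A minimal sketch of the distinction, using a toy function:

from scipy import optimize

root = optimize.brenth(lambda p: p - 2.0, 0.0, 5.0)  # solves f(p) = 0 on [0, 5]
xmin = optimize.brent(lambda p: (p - 2.0) ** 2)      # minimizes f(p)
print(root, xmin)  # both close to 2.0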
Example #6
Source File: test_optimize.py From Computable with MIT License
def test_brent(self):
    """ brent algorithm """
    x = optimize.brent(self.fun)
    assert_allclose(x, self.solution, atol=1e-6)

    x = optimize.brent(self.fun, brack=(-3, -2))
    assert_allclose(x, self.solution, atol=1e-6)

    x = optimize.brent(self.fun, full_output=True)
    assert_allclose(x[0], self.solution, atol=1e-6)

    x = optimize.brent(self.fun, brack=(-15, -1, 15))
    assert_allclose(x, self.solution, atol=1e-6)
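The third call relies on the shape of brent's full output: with full_output=True it returns the tuple (xmin, fval, iter, funcalls), which is why the test checks x[0]. A minimal sketch:

from scipy import optimize

xmin, fval, niter, funcalls = optimize.brent(
    lambda x: (x - 1.0) ** 2, full_output=True)
print(xmin, fval, niter, funcalls)  # ~1.0, ~0.0, iteration and call counts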
Example #7
Source File: morestats.py From Computable with MIT License
def ppcc_max(x, brack=(0.0, 1.0), dist='tukeylambda'):
    """Returns the shape parameter that maximizes the probability plot
    correlation coefficient for the given data to a one-parameter
    family of distributions.

    See also ppcc_plot
    """
    try:
        ppf_func = eval('distributions.%s.ppf' % dist)
    except AttributeError:
        raise ValueError("%s is not a valid distribution with a ppf." % dist)
    """
    res = inspect.getargspec(ppf_func)
    if not ('loc' == res[0][-2] and 'scale' == res[0][-1] and \
            0.0 == res[-1][-2] and 1.0 == res[-1][-1]):
        raise ValueError("Function has does not have default location "
                         "and scale parameters\n  that are 0.0 and 1.0 respectively.")
    if (1 < len(res[0])-len(res[-1])-1) or \
       (1 > len(res[0])-3):
        raise ValueError("Must be a one-parameter family.")
    """
    N = len(x)
    # compute uniform median statistics
    Ui = zeros(N) * 1.0
    Ui[-1] = 0.5 ** (1.0 / N)
    Ui[0] = 1 - Ui[-1]
    i = arange(2, N)
    Ui[1:-1] = (i - 0.3175) / (N + 0.365)
    osr = sort(x)

    # this function computes the x-axis values of the probability plot
    # and computes a linear regression (including the correlation)
    # and returns 1-r so that a minimization function maximizes the
    # correlation
    def tempfunc(shape, mi, yvals, func):
        xvals = func(mi, shape)
        r, prob = stats.pearsonr(xvals, yvals)
        return 1 - r

    return optimize.brent(tempfunc, brack=brack, args=(Ui, osr, ppf_func))
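The public descendant of this function is scipy.stats.ppcc_max; a hedged usage sketch with generated Tukey-lambda data:

import numpy as np
from scipy import stats

x = stats.tukeylambda.rvs(0.3, size=500, random_state=0)
shape = stats.ppcc_max(x)  # brent search for the best shape parameter
print(shape)  # should land near 0.3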
Example #8
Source File: data.py From Mastering-Elasticsearch-7.0 with MIT License
def _box_cox_optimize(self, x):
    """Find and return optimal lambda parameter of the Box-Cox transform by
    MLE, for observed data x.

    We here use scipy builtins which uses the brent optimizer.
    """
    # the computation of lambda is influenced by NaNs so we need to
    # get rid of them
    _, lmbda = stats.boxcox(x[~np.isnan(x)], lmbda=None)

    return lmbda
Example #9
Source File: test_optimize.py From GraphicDesignPatternByPython with MIT License
def test_brent(self):
    x = optimize.brent(self.fun)
    assert_allclose(x, self.solution, atol=1e-6)

    x = optimize.brent(self.fun, brack=(-3, -2))
    assert_allclose(x, self.solution, atol=1e-6)

    x = optimize.brent(self.fun, full_output=True)
    assert_allclose(x[0], self.solution, atol=1e-6)

    x = optimize.brent(self.fun, brack=(-15, -1, 15))
    assert_allclose(x, self.solution, atol=1e-6)
Example #10
Source File: test_optimize.py From GraphicDesignPatternByPython with MIT License
def test_brent_negative_tolerance():
    assert_raises(ValueError, optimize.brent, np.cos, tol=-.01)
Example #11
Source File: 16.1_autoOptimization_detuning.py From xrt with MIT License
def main():
    res = optimize.brent(propagation_function, brack=(0, 1e-5, 5e-5),
                         tol=1e-3, full_output=True)
    print("Output:", res)

    plt.figure('dE vs dTheta')
    plt.plot(np.array(minimizationArray)[:, 0]*1e6,
             np.array(minimizationArray)[:, 1], 'ro', ls='')
    plt.grid()
    axes = plt.gca()
    axes.set_xlabel("$d\Theta$, $\mu$rad")
    axes.set_ylabel("$\Delta$E, eV")
    plt.savefig("dE_vs_dTheta.png")

    plt.figure('Flux vs dTheta')
    plt.plot(np.array(minimizationArray)[:, 0]*1e6,
             np.array(minimizationArray)[:, 2], 'go', ls='')
    plt.grid()
    axes = plt.gca()
    axes.set_xlabel("$d\Theta$, $\mu$rad")
    axes.set_ylabel("Flux, photons/s")
    plt.savefig("Flux_vs_dTheta.png")

    plt.figure('Convergence')
    plt.plot(np.arange(len(minimizationArray)),
             np.array(minimizationArray)[:, 1], '-bo')
    axes = plt.gca()
    axes.set_xlabel("Iteration Nr.")
    axes.set_ylabel("$\Delta$E, eV")
    plt.savefig("Convergence.png")
    plt.show()
Example #12
Source File: morestats.py From Computable with MIT License
def boxcox(x, lmbda=None, alpha=None):
    """
    Return a positive dataset transformed by a Box-Cox power transformation.

    Parameters
    ----------
    x : ndarray
        Input array.
    lmbda : {None, scalar}, optional
        If `lmbda` is not None, do the transformation for that value.

        If `lmbda` is None, find the lambda that maximizes the
        log-likelihood function and return it as the second output
        argument.
    alpha : {None, float}, optional
        If `alpha` is not None, return the ``100 * (1-alpha)%`` confidence
        interval for `lmbda` as the third output argument.
        If `alpha` is not None it must be between 0.0 and 1.0.

    Returns
    -------
    boxcox : ndarray
        Box-Cox power transformed array.
    maxlog : float, optional
        If the `lmbda` parameter is None, the second returned argument is
        the lambda that maximizes the log-likelihood function.
    (min_ci, max_ci) : tuple of float, optional
        If `lmbda` parameter is None and `alpha` is not None, this returned
        tuple of floats represents the minimum and maximum confidence
        limits given `alpha`.

    """
    if any(x < 0):
        raise ValueError("Data must be positive.")

    if lmbda is not None:  # single transformation
        lmbda = lmbda * (x == x)
        y = where(lmbda == 0, log(x), (x**lmbda - 1) / lmbda)
        return y

    # Otherwise find the lmbda that maximizes the log-likelihood function.
    def tempfunc(lmb, data):  # function to minimize
        return -boxcox_llf(lmb, data)

    lmax = optimize.brent(tempfunc, brack=(-2.0, 2.0), args=(x,))
    y = boxcox(x, lmax)
    if alpha is None:
        return y, lmax

    # Otherwise find confidence interval
    interval = _boxcox_conf_interval(x, lmax, alpha)
    return y, lmax, interval
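A usage sketch of the three return shapes described in the docstring, using the modern equivalent scipy.stats.boxcox and made-up data:

import numpy as np
from scipy import stats

x = np.random.RandomState(0).exponential(size=100) + 0.01  # strictly positive

y = stats.boxcox(x, lmbda=0.5)                          # fixed lambda: array only
y, lmax = stats.boxcox(x)                               # lmbda=None: plus fitted lambda
y, lmax, (ci_lo, ci_hi) = stats.boxcox(x, alpha=0.05)   # plus a 95% CI for lambda
print(lmax, ci_lo, ci_hi)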
Example #13
Source File: boxcox.py From sktime with BSD 3-Clause "New" or "Revised" License
def boxcox_normmax(x, bounds=None, brack=(-2.0, 2.0), method='pearsonr'):
    # bounds is None, use simple Brent optimisation
    if bounds is None:
        def optimizer(func, args):
            return optimize.brent(func, brack=brack, args=args)

    # otherwise use bounded Brent optimisation
    else:
        # input checks on bounds
        if not isinstance(bounds, tuple) or len(bounds) != 2:
            raise ValueError(
                f"`bounds` must be a tuple of length 2, but found: {bounds}")

        def optimizer(func, args):
            return optimize.fminbound(func, bounds[0], bounds[1], args=args)

    def _pearsonr(x):
        osm_uniform = _calc_uniform_order_statistic_medians(len(x))
        xvals = distributions.norm.ppf(osm_uniform)

        def _eval_pearsonr(lmbda, xvals, samps):
            # This function computes the x-axis values of the probability
            # plot and computes a linear regression (including the
            # correlation) and returns ``1 - r`` so that a minimization
            # function maximizes the correlation.
            y = boxcox(samps, lmbda)
            yvals = np.sort(y)
            r, prob = stats.pearsonr(xvals, yvals)
            return 1 - r

        return optimizer(_eval_pearsonr, args=(xvals, x))

    def _mle(x):
        def _eval_mle(lmb, data):
            # function to minimize
            return -boxcox_llf(lmb, data)

        return optimizer(_eval_mle, args=(x,))

    def _all(x):
        maxlog = np.zeros(2, dtype=float)
        maxlog[0] = _pearsonr(x)
        maxlog[1] = _mle(x)
        return maxlog

    methods = {'pearsonr': _pearsonr,
               'mle': _mle,
               'all': _all}

    if method not in methods.keys():
        raise ValueError("Method %s not recognized." % method)

    optimfunc = methods[method]
    return optimfunc(x)
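The only difference between the two optimizer branches is whether lambda may leave the interval: brent treats brack as a starting bracket that the search can step out of, while fminbound enforces hard bounds. A toy illustration:

from scipy import optimize

f = lambda lmb: (lmb - 3.0) ** 2  # minimum at 3.0, outside (-2, 2)

print(optimize.brent(f, brack=(-2.0, 2.0)))  # unconstrained: ~3.0
print(optimize.fminbound(f, -2.0, 2.0))      # bounded: pinned near 2.0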