Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

WIP/ENH: optimize.minimize_scalar: add method 'chandrupatla'? #20624

Closed
wants to merge 1 commit into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
18 changes: 13 additions & 5 deletions scipy/optimize/_minimize.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@
NonlinearConstraint, LinearConstraint, Bounds,
PreparedConstraint)
from ._differentiable_functions import FD_METHODS
from ._chandrupatla import _chandrupatla_minimize

MINIMIZE_METHODS = ['nelder-mead', 'powell', 'cg', 'bfgs', 'newton-cg',
'l-bfgs-b', 'tnc', 'cobyla', 'slsqp', 'trust-constr',
Expand All @@ -46,7 +47,7 @@
'l-bfgs-b', 'trust-constr', 'dogleg', 'trust-ncg',
'trust-exact', 'trust-krylov']

MINIMIZE_SCALAR_METHODS = ['brent', 'bounded', 'golden']
MINIMIZE_SCALAR_METHODS = ['brent', 'bounded', 'golden', 'chandrupatla']

def minimize(fun, x0, args=(), method=None, jac=None, hess=None,
hessp=None, bounds=None, constraints=(), tol=None,
Expand Down Expand Up @@ -792,12 +793,14 @@ def minimize_scalar(fun, bracket=None, bounds=None, args=(),
Tolerance for termination. For detailed control, use solver-specific
options.
options : dict, optional
A dictionary of solver options.
A dictionary of solver options. With exceptions where noted, all solvers
support the following options.

maxiter : int
Maximum number of iterations to perform.
disp : bool
Set to True to print convergence messages.
Set to True to print convergence messages. Ignored by
`method='chandrupatla'`.

See :func:`show_options()` for solver-specific options.

Expand Down Expand Up @@ -912,8 +915,9 @@ def minimize_scalar(fun, bracket=None, bounds=None, args=(),
if options is None:
options = {}

if bounds is not None and meth in {'brent', 'golden'}:
message = f"Use of `bounds` is incompatible with 'method={method}'."
if bounds is not None and meth in {'brent', 'golden', 'chandrupatla'}:
message = (f"Use of `bounds` is incompatible with '{method=}'. "
"To enforce bounds, provide a three-point bracket.")
raise ValueError(message)

if tol is not None:
Expand All @@ -925,6 +929,8 @@ def minimize_scalar(fun, bracket=None, bounds=None, args=(),
options['xatol'] = tol
elif meth == '_custom':
options.setdefault('tol', tol)
elif meth == 'chandrupatla':
options.setdefault('xrtol', tol)
else:
options.setdefault('xtol', tol)

Expand All @@ -946,6 +952,8 @@ def minimize_scalar(fun, bracket=None, bounds=None, args=(),
elif meth == 'golden':
res = _recover_from_bracket_error(_minimize_scalar_golden,
fun, bracket, args, **options)
elif method == 'chandrupatla':
res = _chandrupatla_minimize(fun, *bracket, args=args, **options)
else:
raise ValueError('Unknown solver %s' % method)

Expand Down
18 changes: 18 additions & 0 deletions scipy/optimize/_optimize.py
Original file line number Diff line number Diff line change
Expand Up @@ -2874,6 +2874,23 @@ def _minimize_scalar_golden(func, brack=None, args=(),
success=success, message=message)



def _minimize_scalar_chandrupatla(func, brack=None, args=(), tol=1.48e-8,
maxiter=500, disp=0, **unknown_options):
"""
Options
-------
maxiter : int
Maximum number of iterations to perform.
xatol, xrtol, fatol, frtol : float
Blah blah
disp: int, optional
Ignored.

"""
pass


def bracket(func, xa=0.0, xb=1.0, args=(), grow_limit=110.0, maxiter=1000):
"""
Bracket the minimum of a function.
Expand Down Expand Up @@ -4037,6 +4054,7 @@ def show_options(solver=None, method=None, disp=True):
('brent', 'scipy.optimize._optimize._minimize_scalar_brent'),
('bounded', 'scipy.optimize._optimize._minimize_scalar_bounded'),
('golden', 'scipy.optimize._optimize._minimize_scalar_golden'),
('chandrupatla', 'scipy.optimize._optimize._minimize_scalar_chandrupatla'),
),
}

Expand Down
94 changes: 79 additions & 15 deletions scipy/optimize/tests/test_optimize.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,7 @@
from scipy.optimize._qap import QUADRATIC_ASSIGNMENT_METHODS
from scipy.optimize._differentiable_functions import ScalarFunction, FD_METHODS
from scipy.optimize._optimize import MemoizeJac, show_options, OptimizeResult
from scipy.optimize._bracket import _bracket_minimum
from scipy.optimize import rosen, rosen_der, rosen_hess

from scipy.sparse import (coo_matrix, csc_matrix, csr_matrix, coo_array,
Expand Down Expand Up @@ -1840,8 +1841,8 @@ def fun(x):
return x**2
optimize.fminbound(fun, 0, 0)

def test_minimize_scalar(self):
# combine all tests above for the minimize_scalar wrapper
def test_minimize_scalar_brent(self):
# perform `test_brent` above but with `minimize_scalar` wrapper
x = optimize.minimize_scalar(self.fun).x
assert_allclose(x, self.solution, atol=1e-6)

Expand All @@ -1864,6 +1865,8 @@ def test_minimize_scalar(self):
args=(1.5, ), method='Brent').x
assert_allclose(x, self.solution, atol=1e-6)

def test_minimize_scalar_golden(self):
# perform `test_golden` above but with `minimize_scalar` wrapper
x = optimize.minimize_scalar(self.fun, bracket=(-3, -2),
args=(1.5, ), method='golden').x
assert_allclose(x, self.solution, atol=1e-6)
Expand All @@ -1876,6 +1879,8 @@ def test_minimize_scalar(self):
args=(1.5, ), method='golden').x
assert_allclose(x, self.solution, atol=1e-6)

def test_minimize_scalar_bounded(self):
# perform `test_fminbound` above but with `minimize_scalar` wrapper
x = optimize.minimize_scalar(self.fun, bounds=(0, 1), args=(1.5,),
method='Bounded').x
assert_allclose(x, 1, atol=1e-4)
Expand All @@ -1900,6 +1905,57 @@ def test_minimize_scalar(self):
method='bounded').x
assert_allclose(x, self.solution, atol=1e-6)


def _minimize_scalar_chandrupatla(self, fun, bracket=(), bounds=(), args=(),
                                  tol=None, options=None):
    """Route a `minimize_scalar`-style call through method='chandrupatla'.

    'chandrupatla' requires a full three-point bracket, so grow one with
    `_bracket_minimum` from whatever the caller provided: a two-point
    bracket, bounds, or nothing (default initial guesses).
    """
    # Mutable default arguments are shared across calls; use None sentinel.
    options = {} if options is None else options

    if len(bracket) == 3:
        kwargs = dict(xl0=bracket[0], xm0=bracket[1], xr0=bracket[2])
    elif len(bracket) == 2:
        kwargs = dict(xm0=bracket[0], xr0=bracket[1])
    elif bounds:
        # start from the midpoint and confine the search to the bounds
        kwargs = dict(xm0=(bounds[0] + bounds[1])/2,
                      xmin=bounds[0], xmax=bounds[1])
    else:
        kwargs = dict(xm0=0.0, xr0=1.0)

    # Bracket the *passed* function, not `self.fun`: callers may minimize
    # a different callable (previously `self.fun` was bracketed here, which
    # silently disagreed with the function handed to `minimize_scalar`).
    res = _bracket_minimum(fun, args=args, **kwargs)
    if res.status == -1:  # bracket converged to a single point
        res.x = res.xm
        return res

    bracket = (res.xl, res.xm, res.xr)
    return optimize.minimize_scalar(fun, bracket=bracket, method='chandrupatla',
                                    args=args, tol=tol, options=options)


def test_minimize_scalar_chandrupatla(self):
    # run the scenarios used for the other scalar minimizers, routed
    # through the chandrupatla helper above
    res = self._minimize_scalar_chandrupatla(self.fun)
    assert_allclose(res.x, self.solution, atol=1e-6)

    # an iteration budget that is too small reports failure
    res = self._minimize_scalar_chandrupatla(self.fun,
                                             options=dict(maxiter=3))
    assert not res.success

    # two-point bracket, no bracket, and full three-point bracket
    for bracket in [(-3, -2), (), (-15, -1, 15)]:
        res = self._minimize_scalar_chandrupatla(self.fun, bracket=bracket,
                                                 args=(1.5,))
        assert_allclose(res.x, self.solution, atol=1e-6)

    # bounds that exclude the unconstrained minimizer
    res = self._minimize_scalar_chandrupatla(self.fun, bounds=(0, 1),
                                             args=(1.5,))
    assert_allclose(res.x, 1, atol=1e-4)

    # bounds that contain the unconstrained minimizer
    res = self._minimize_scalar_chandrupatla(self.fun, bounds=(1, 5),
                                             args=(1.5,))
    assert_allclose(res.x, self.solution, atol=1e-6)

    # array-valued bounds and args are accepted
    res = self._minimize_scalar_chandrupatla(
        self.fun, bounds=(np.array([1]), np.array([5])),
        args=(np.array([1.5]),))
    assert_allclose(res.x, self.solution, atol=1e-6)


def test_minimize_scalar_custom(self):
# This function comes from the documentation example.
def custmin(fun, bracket, args=(), maxfev=None, stepsize=0.1,
Expand Down Expand Up @@ -1939,25 +1995,32 @@ def test_minimize_scalar_coerce_args_param(self):
# Regression test for gh-3503
optimize.minimize_scalar(self.fun, args=1.5)

@pytest.mark.parametrize('method', ['brent', 'bounded', 'golden'])
@pytest.mark.parametrize('method', ['brent', 'bounded', 'golden', 'chandrupatla'])
def test_disp(self, method):
    # test that all minimize_scalar methods accept a disp option.
    # (method='chandrupatla' accepts `disp` but ignores it.)
    # NOTE: previously `method` was parametrized but never passed, so only
    # the default solver was ever exercised.
    # 'bounded' requires bounds; 'chandrupatla' requires a three-point
    # bracket — TODO confirm (-10, 0, 10) brackets self.fun's minimizer.
    kwargs = {}
    if method == 'bounded':
        kwargs['bounds'] = (-10, 10)
    elif method == 'chandrupatla':
        kwargs['bracket'] = (-10, 0, 10)
    for disp in [0, 1, 2, 3]:
        optimize.minimize_scalar(self.fun, method=method,
                                 options={"disp": disp}, **kwargs)

@pytest.mark.parametrize('method', ['brent', 'bounded', 'golden'])
@pytest.mark.parametrize('method', ['brent', 'bounded', 'golden', 'chandrupatla'])
def test_result_attributes(self, method):
    # every scalar minimizer must return a fully-populated result object
    kwargs = {"bounds": [-10, 10]} if method == 'bounded' else {}
    if method == 'chandrupatla':
        result = self._minimize_scalar_chandrupatla(self.fun, **kwargs)
    else:
        result = optimize.minimize_scalar(self.fun, method=method, **kwargs)

    # chandrupatla reports `status` where the others report `message`
    attrs = ["x", "success", "fun", "nfev", "nit",
             "status" if method == 'chandrupatla' else "message"]
    for attr in attrs:
        assert hasattr(result, attr)

@pytest.mark.filterwarnings('ignore::UserWarning')
@pytest.mark.parametrize('method', ['brent', 'bounded', 'golden'])
@pytest.mark.parametrize('method', ['brent', 'bounded', 'golden', 'chandrupatla'])
def test_nan_values(self, method):
# Check nan values result to failed exit status
np.random.seed(1234)
Expand All @@ -1967,20 +2030,15 @@ def test_nan_values(self, method):
def func(x):
count[0] += 1
if count[0] > 4:
return np.nan
return np.nan*x
else:
return x**2 + 0.1 * np.sin(x)

bracket = (-1, 0, 1)
bounds = (-1, 1)

with np.errstate(invalid='ignore'), suppress_warnings() as sup:
sup.filter(UserWarning, "delta_grad == 0.*")
sup.filter(RuntimeWarning, ".*does not use Hessian.*")
sup.filter(RuntimeWarning, ".*does not use gradient.*")

with np.errstate(invalid='ignore'):
count = [0]

kwargs = {"bounds": bounds} if method == 'bounded' else {}
sol = optimize.minimize_scalar(func, bracket=bracket,
**kwargs, method=method,
Expand Down Expand Up @@ -2029,11 +2087,17 @@ def f(x):
return np.array(x**4).reshape(fshape)

a, b = -0.1, 0.2
if method == 'chandrupatla':
a = np.broadcast_to(a, fshape)
b = np.broadcast_to(b, fshape)
kwargs = (dict(bracket=(a, b)) if method != "bounded"
else dict(bounds=(a, b)))
kwargs.update(dict(method=method, tol=tol))

res = optimize.minimize_scalar(f, **kwargs)
if kwargs.pop('method') == 'chandrupatla':
res = self._minimize_scalar_chandrupatla(f, **kwargs)
else:
res = optimize.minimize_scalar(f, **kwargs)
assert res.x.shape == res.fun.shape == f(res.x).shape == fshape

@pytest.mark.parametrize('method', ['bounded', 'brent', 'golden'])
Expand Down