ENH: drop full_output parameter from minimize_scalar

commit 54ee3ad0249844a9ded87f3ef595ae62dfe1f709 (1 parent: 4aa309e)
Denis Laxalde authored April 18, 2012
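
With this change minimize_scalar no longer accepts a full_output flag: the private
solvers always build the info dictionary, and the wrapper always returns the
(xopt, info) pair. A minimal sketch of the new calling convention (the quadratic
objective below is illustrative only, not part of the commit):

    from scipy import optimize

    def f(x):
        return (x - 2.0) ** 2          # any smooth one-variable objective

    # minimize_scalar now always returns the pair (xopt, info); callers that
    # only want the minimizer index the result with [0], as the tests below do.
    x, info = optimize.minimize_scalar(f, bracket=(0, 4), method='brent')
    print(x, info['fun'], info['nit'], info['nfev'])

The old wrappers fminbound, brent and golden keep their full_output parameter and
unpack this pair internally, as the optimize.py hunks below show.
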
scipy/optimize/_minimize.py (15 changed lines)
@@ -363,7 +363,7 @@ def minimize(fun, x0, args=(), method='BFGS', jac=None, hess=None,
 
 
 def minimize_scalar(fun, bracket=None, bounds=None, args=(),
-                    method='brent', options=dict(), full_output=False):
+                    method='brent', options=dict()):
     """
     Minimization of scalar function of one variable.
 
@@ -400,15 +400,13 @@ def minimize_scalar(fun, bracket=None, bounds=None, args=(),
                 Maximum number of iterations to perform.
             disp : bool
                 Set to True to print convergence messages.
-    full_output : bool, optional
-        If True, return optional outputs.  Default is False.
 
     Returns
     -------
     xopt : ndarray
         The solution.
     info : dict
-        A dictionary of optional outputs (depending on the chosen method)
+        A dictionary of extra outputs (depending on the chosen method)
         with the keys:
             success : bool
                 Boolean flag indicating if a solution was found.
@@ -471,17 +469,14 @@ def minimize_scalar(fun, bracket=None, bounds=None, args=(),
     meth = method.lower()
 
     if meth == 'brent':
-        return _minimize_scalar_brent(fun, bracket, args, options,
-                                      full_output)
+        return _minimize_scalar_brent(fun, bracket, args, options)
     elif meth == 'bounded':
         if bounds is None:
             raise ValueError('The `bounds` parameter is mandatory for '
                              'method `bounded`.')
-        return _minimize_scalar_bounded(fun, bounds, args, options,
-                                        full_output)
+        return _minimize_scalar_bounded(fun, bounds, args, options)
     elif meth == 'golden':
-        return _minimize_scalar_golden(fun, bracket, args, options,
-                                       full_output)
+        return _minimize_scalar_golden(fun, bracket, args, options)
     else:
         raise ValueError('Unknown solver %s' % method)
 
scipy/optimize/optimize.py (70 changed lines)
@@ -1349,16 +1349,13 @@ def fminbound(func, x1, x2, args=(), xtol=1e-5, maxfun=500,
                'maxfev': maxfun,
                'disp': disp}
 
-    out =  _minimize_scalar_bounded(func, (x1, x2), args, options,
-                                    full_output)
+    x, info =  _minimize_scalar_bounded(func, (x1, x2), args, options)
     if full_output:
-        x, info = out
         return x, info['fun'], info['status'], info['nfev']
     else:
-        return out
+        return x
 
-def _minimize_scalar_bounded(func, bounds, args=(), options={},
-                             full_output=False):
+def _minimize_scalar_bounded(func, bounds, args=(), options={}):
     # retrieve options
     xtol = options.get('xtol', 1e-5)
     maxfun = options.get('maxfev', 500)
@@ -1474,18 +1471,15 @@ def _minimize_scalar_bounded(func, bounds, args=(), options={},
     if disp > 0:
         _endprint(x, flag, fval, maxfun, xtol, disp)
 
-    if full_output:
-        info = {'fun': fval,
-                'status': flag,
-                'success': flag == 0,
-                'message': {0: 'Solution found.',
-                            1: 'Maximum number of function '
-                               'calls reached.'}.get(flag, ''),
-                'nfev': num}
-
-        return xf, info
-    else:
-        return xf
+    info = {'fun': fval,
+            'status': flag,
+            'success': flag == 0,
+            'message': {0: 'Solution found.',
+                        1: 'Maximum number of function '
+                           'calls reached.'}.get(flag, ''),
+            'nfev': num}
+
+    return xf, info
 
 class Brent:
     #need to rethink design of __init__
@@ -1674,33 +1668,27 @@ def brent(func, args=(), brack=None, tol=1.48e-8, full_output=0, maxiter=500):
     """
     options = {'ftol': tol,
                'maxiter': maxiter}
-    out = _minimize_scalar_brent(func, brack, args, options, full_output)
+    x, info = _minimize_scalar_brent(func, brack, args, options)
     if full_output:
-        x, info = out
         return x, info['fun'], info['nit'], info['nfev']
     else:
-        return out
+        return x
 
-def _minimize_scalar_brent(func, brack=None, args=(), options={},
-                           full_output=False):
+def _minimize_scalar_brent(func, brack=None, args=(), options={}):
     # retrieve options
    tol = options.get('ftol', 1.48e-8)
     maxiter = options.get('maxiter', 500)
 
 
     brent = Brent(func=func, args=args, tol=tol,
-                  full_output=full_output, maxiter=maxiter)
+                  full_output=True, maxiter=maxiter)
     brent.set_bracket(brack)
     brent.optimize()
-    out = brent.get_result(full_output=full_output)
-    if full_output:
-        x, fval, nit, nfev = out
-        info = {'fun': fval,
-                'nit': nit,
-                'nfev': nfev}
-        return x, info
-    else:
-        return out
+    x, fval, nit, nfev = brent.get_result(full_output=True)
+    info = {'fun': fval,
+            'nit': nit,
+            'nfev': nfev}
+    return x, info
 
 def golden(func, args=(), brack=None, tol=_epsilon, full_output=0):
     """ Given a function of one-variable and a possible bracketing interval,
@@ -1736,15 +1724,13 @@ def golden(func, args=(), brack=None, tol=_epsilon, full_output=0):
 
     """
     options = {'ftol': tol}
-    out = _minimize_scalar_golden(func, brack, args, options, full_output)
+    x, info = _minimize_scalar_golden(func, brack, args, options)
     if full_output:
-        x, info = out
         return x, info['fun'], info['nfev']
     else:
-        return out
+        return x
 
-def _minimize_scalar_golden(func, brack=None, args=(), options={},
-                            full_output=False):
+def _minimize_scalar_golden(func, brack=None, args=(), options={}):
     tol = options.get('ftol', _epsilon)
     if brack is None:
         xa, xb, xc, fa, fb, fc, funcalls = bracket(func, args=args)
@@ -1792,11 +1778,9 @@ def _minimize_scalar_golden(func, brack=None, args=(), options={},
     else:
         xmin = x2
         fval = f2
-    if full_output:
-        info = {'fun': fval, 'nfev': funcalls}
-        return xmin, info
-    else:
-        return xmin
+
+    info = {'fun': fval, 'nfev': funcalls}
+    return xmin, info
 
 
 def bracket(func, xa=0.0, xb=1.0, args=(), grow_limit=110.0, maxiter=1000):
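
Note that only the private helpers changed signature here: fminbound, brent and
golden still expose full_output and translate the (x, info) pair back into their
historical tuple form. A rough sketch of the equivalence for golden (f is a
hypothetical one-variable function, not from this commit):

    xmin = optimize.golden(f, brack=(0, 1, 2))                    # minimizer only
    xmin, fval, funcalls = optimize.golden(f, brack=(0, 1, 2), full_output=True)

    # Internally both calls are now built from the same pair,
    #     x, info = _minimize_scalar_golden(f, (0, 1, 2), (), {'ftol': tol})
    # with fval == info['fun'] and funcalls == info['nfev'].
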
scipy/optimize/tests/test_optimize.py (36 changed lines)
@@ -520,45 +520,45 @@ def test_fminbound_scalar(self):
 
     def test_minimize_scalar(self):
         # combine all tests above for the minimize_scalar wrapper
-        x = optimize.minimize_scalar(self.fun)
+        x = optimize.minimize_scalar(self.fun)[0]
         assert_allclose(x, self.solution, atol=1e-6)
 
-        x = optimize.minimize_scalar(self.fun, bracket = (-3, -2),
-                                     args=(1.5, ), method='Brent')
+        x= optimize.minimize_scalar(self.fun, bracket = (-3, -2),
+                                    args=(1.5, ), method='Brent')[0]
         assert_allclose(x, self.solution, atol=1e-6)
 
-        x = optimize.minimize_scalar(self.fun, method='Brent',
-                                     args=(1.5, ), full_output=True)[0]
+        x= optimize.minimize_scalar(self.fun, method='Brent',
+                                    args=(1.5,))[0]
         assert_allclose(x, self.solution, atol=1e-6)
 
-        x = optimize.minimize_scalar(self.fun, bracket=(-15, -1, 15),
-                                     args=(1.5, ), method='Brent')
+        x= optimize.minimize_scalar(self.fun, bracket=(-15, -1, 15),
+                                    args=(1.5, ), method='Brent')[0]
         assert_allclose(x, self.solution, atol=1e-6)
 
         x = optimize.minimize_scalar(self.fun, bracket = (-3, -2),
-                                     args=(1.5, ), method='golden')
+                                     args=(1.5, ), method='golden')[0]
         assert_allclose(x, self.solution, atol=1e-6)
 
         x = optimize.minimize_scalar(self.fun, method='golden',
-                                     args=(1.5, ), full_output=True)[0]
+                                     args=(1.5,))[0]
         assert_allclose(x, self.solution, atol=1e-6)
 
         x = optimize.minimize_scalar(self.fun, bracket=(-15, -1, 15),
-                                     args=(1.5, ), method='golden')
+                                     args=(1.5, ), method='golden')[0]
         assert_allclose(x, self.solution, atol=1e-6)
 
         x = optimize.minimize_scalar(self.fun, bounds=(0, 1), args=(1.5,),
-                                     method='Bounded')
+                                     method='Bounded')[0]
         assert_allclose(x, 1, atol=1e-4)
 
-        x = optimize.minimize_scalar(self.fun, bounds=(1, 5), args=(1.5, ),
-                                     method='bounded')
+        x= optimize.minimize_scalar(self.fun, bounds=(1, 5), args=(1.5, ),
+                                    method='bounded')[0]
         assert_allclose(x, self.solution, atol=1e-6)
 
-        x = optimize.minimize_scalar(self.fun,
-                                     bounds=(np.array([1]), np.array([5])),
-                                     args=(np.array([1.5]), ),
-                                     method='bounded')
+        x= optimize.minimize_scalar(self.fun, bounds=(np.array([1]),
+                                                      np.array([5])),
+                                    args=(np.array([1.5]), ),
+                                    method='bounded')[0]
         assert_allclose(x, self.solution, atol=1e-6)
 
         assert_raises(ValueError, optimize.minimize_scalar, self.fun,
@@ -568,7 +568,7 @@ def test_minimize_scalar(self):
                       bounds=(np.zeros(2), 1), method='bounded', args=(1.5, ))
 
         x = optimize.minimize_scalar(self.fun, bounds=(1, np.array(5)),
-                                     method='bounded')
+                                     method='bounded')[0]
         assert_allclose(x, self.solution, atol=1e-6)
 
 class TestTnc(TestCase):
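
The tests index the returned pair with [0] because they only check the minimizer;
the second element is the info dictionary. For the bounded method the optimize.py
hunk above shows it carries the keys 'fun', 'status', 'success', 'message' and
'nfev', so a caller could inspect convergence roughly like this (f is again a
hypothetical objective):

    x, info = optimize.minimize_scalar(f, bounds=(0, 1), method='bounded')
    if not info['success']:
        print(info['message'])   # e.g. 'Maximum number of function calls reached.'
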
