"""
Unit tests for optimization routines from optimize.py

Authors:
   Ed Schofield, Nov 2005
   Andrew Straw, April 2008

To run it in its simplest form::

  python -m pytest test_optimize.py

"""
import itertools
import platform

import numpy as np
from numpy.testing import (assert_allclose, assert_equal,
                           assert_almost_equal,
                           assert_no_warnings, assert_warns,
                           assert_array_less, suppress_warnings)
import pytest
from pytest import raises as assert_raises

from scipy import optimize
from scipy.optimize._minimize import Bounds, NonlinearConstraint
from scipy.optimize._minimize import MINIMIZE_METHODS, MINIMIZE_SCALAR_METHODS
from scipy.optimize._linprog import LINPROG_METHODS
from scipy.optimize._root import ROOT_METHODS
from scipy.optimize._root_scalar import ROOT_SCALAR_METHODS
from scipy.optimize._qap import QUADRATIC_ASSIGNMENT_METHODS
from scipy.optimize._differentiable_functions import ScalarFunction, FD_METHODS
from scipy.optimize._optimize import MemoizeJac, show_options
def test_check_grad():
    # Verify if check_grad is able to estimate the derivative of the
    # expit (logistic sigmoid) function.

    def expit(x):
        return 1 / (1 + np.exp(-x))

    def der_expit(x):
        return np.exp(-x) / (1 + np.exp(-x))**2

    x0 = np.array([1.5])

    r = optimize.check_grad(expit, der_expit, x0)
    assert_almost_equal(r, 0)
    r = optimize.check_grad(expit, der_expit, x0,
                            direction='random', seed=1234)
    assert_almost_equal(r, 0)

    r = optimize.check_grad(expit, der_expit, x0, epsilon=1e-6)
    assert_almost_equal(r, 0)
    r = optimize.check_grad(expit, der_expit, x0, epsilon=1e-6,
                            direction='random', seed=1234)
    assert_almost_equal(r, 0)

    # Check if the epsilon parameter is being considered.
    r = abs(optimize.check_grad(expit, der_expit, x0, epsilon=1e-1) - 0)
    assert r > 1e-7
    r = abs(optimize.check_grad(expit, der_expit, x0, epsilon=1e-1,
                                direction='random', seed=1234) - 0)
    assert r > 1e-7

    def x_sinx(x):
        return (x*np.sin(x)).sum()

    def der_x_sinx(x):
        return np.sin(x) + x*np.cos(x)

    x0 = np.arange(0, 2, 0.2)
    r = optimize.check_grad(x_sinx, der_x_sinx, x0,
                            direction='random', seed=1234)
    assert_almost_equal(r, 0)

    assert_raises(ValueError, optimize.check_grad,
                  x_sinx, der_x_sinx, x0,
                  direction='random_projection', seed=1234)

    # checking can be done for derivatives of vector valued functions
    r = optimize.check_grad(himmelblau_grad, himmelblau_hess, himmelblau_x0,
                            direction='all', seed=1234)
    assert r < 5e-7
class CheckOptimize:
    """ Base test case for a simple constrained entropy maximization problem
    (the machine translation example of Berger et al in
    Computational Linguistics, vol 22, num 1, pp 39--72, 1996.)
    """
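    # The methods below work with the (unconstrained) dual of that problem:
    # writing Z(x) = sum_i exp(F[i] @ x), ``func`` returns
    # f(x) = log Z(x) - K @ x, ``grad`` returns its gradient F.T @ p - K with
    # p_i = exp(F[i] @ x) / Z(x), and ``hess``/``hessp`` supply the
    # corresponding Hessian (or Hessian-vector product).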
    def setup_method(self):
        self.F = np.array([[1, 1, 1],
                           [1, 1, 0],
                           [1, 0, 1],
                           [1, 0, 0],
                           [1, 0, 0]])
        self.K = np.array([1., 0.3, 0.5])
        self.startparams = np.zeros(3, np.float64)
        self.solution = np.array([0., -0.524869316, 0.487525860])
        self.maxiter = 1000
        self.funccalls = 0
        self.gradcalls = 0
        self.trace = []

    def func(self, x):
        self.funccalls += 1
        if self.funccalls > 6000:
            raise RuntimeError("too many iterations in optimization routine")
        log_pdot = np.dot(self.F, x)
        logZ = np.log(sum(np.exp(log_pdot)))
        f = logZ - np.dot(self.K, x)
        self.trace.append(np.copy(x))
        return f

    def grad(self, x):
        self.gradcalls += 1
        log_pdot = np.dot(self.F, x)
        logZ = np.log(sum(np.exp(log_pdot)))
        p = np.exp(log_pdot - logZ)
        return np.dot(self.F.transpose(), p) - self.K

    def hess(self, x):
        log_pdot = np.dot(self.F, x)
        logZ = np.log(sum(np.exp(log_pdot)))
        p = np.exp(log_pdot - logZ)
        return np.dot(self.F.T,
                      np.dot(np.diag(p), self.F - np.dot(self.F.T, p)))

    def hessp(self, x, p):
        return np.dot(self.hess(x), p)
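# ``CheckOptimizeParameterized`` below relies on two class attributes that are
# only defined on its concrete subclasses (TestOptimizeWrapperDisp and friends
# further down): ``use_wrapper`` chooses between the ``optimize.minimize``
# interface and the legacy ``fmin_*`` functions, and ``disp`` toggles the
# solvers' console output.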
class CheckOptimizeParameterized(CheckOptimize):

    def test_cg(self):
        # conjugate gradient optimization routine
        if self.use_wrapper:
            opts = {'maxiter': self.maxiter, 'disp': self.disp,
                    'return_all': False}
            res = optimize.minimize(self.func, self.startparams, args=(),
                                    method='CG', jac=self.grad,
                                    options=opts)
            params, fopt, func_calls, grad_calls, warnflag = \
                res['x'], res['fun'], res['nfev'], res['njev'], res['status']
        else:
            retval = optimize.fmin_cg(self.func, self.startparams,
                                      self.grad, (), maxiter=self.maxiter,
                                      full_output=True, disp=self.disp,
                                      retall=False)
            (params, fopt, func_calls, grad_calls, warnflag) = retval

        assert_allclose(self.func(params), self.func(self.solution),
                        atol=1e-6)

        # Ensure that function call counts are 'known good'; these are from
        # SciPy 0.7.0. Don't allow them to increase.
        assert self.funccalls == 9, self.funccalls
        assert self.gradcalls == 7, self.gradcalls

        # Ensure that the function behaves the same; this is from SciPy 0.7.0
        assert_allclose(self.trace[2:4],
                        [[0, -0.5, 0.5],
                         [0, -5.05700028e-01, 4.95985862e-01]],
                        atol=1e-14, rtol=1e-7)

    def test_cg_cornercase(self):
        def f(r):
            return 2.5 * (1 - np.exp(-1.5*(r - 0.5)))**2

        # Check several initial guesses. (Too far away from the
        # minimum, the function ends up in the flat region of exp.)
        for x0 in np.linspace(-0.75, 3, 71):
            sol = optimize.minimize(f, [x0], method='CG')
            assert sol.success
            assert_allclose(sol.x, [0.5], rtol=1e-5)

    def test_bfgs(self):
        # Broyden-Fletcher-Goldfarb-Shanno optimization routine
        if self.use_wrapper:
            opts = {'maxiter': self.maxiter, 'disp': self.disp,
                    'return_all': False}
            res = optimize.minimize(self.func, self.startparams,
                                    jac=self.grad, method='BFGS', args=(),
                                    options=opts)
            params, fopt, gopt, Hopt, func_calls, grad_calls, warnflag = (
                res['x'], res['fun'], res['jac'], res['hess_inv'],
                res['nfev'], res['njev'], res['status'])
        else:
            retval = optimize.fmin_bfgs(self.func, self.startparams,
                                        self.grad, args=(),
                                        maxiter=self.maxiter,
                                        full_output=True, disp=self.disp,
                                        retall=False)
            (params, fopt, gopt, Hopt,
             func_calls, grad_calls, warnflag) = retval

        assert_allclose(self.func(params), self.func(self.solution),
                        atol=1e-6)

        # Ensure that function call counts are 'known good'; these are from
        # SciPy 0.7.0. Don't allow them to increase.
        assert self.funccalls == 10, self.funccalls
        assert self.gradcalls == 8, self.gradcalls

        # Ensure that the function behaves the same; this is from SciPy 0.7.0
        assert_allclose(self.trace[6:8],
                        [[0, -5.25060743e-01, 4.87748473e-01],
                         [0, -5.24885582e-01, 4.87530347e-01]],
                        atol=1e-14, rtol=1e-7)

    def test_bfgs_infinite(self):
        # Test corner case where -Inf is the minimum. See gh-2019.
        func = lambda x: -np.e**-x
        fprime = lambda x: -func(x)
        x0 = [0]
        with np.errstate(over='ignore'):
            if self.use_wrapper:
                opts = {'disp': self.disp}
                x = optimize.minimize(func, x0, jac=fprime, method='BFGS',
                                      args=(), options=opts)['x']
            else:
                x = optimize.fmin_bfgs(func, x0, fprime, disp=self.disp)
            assert not np.isfinite(func(x))

    def test_bfgs_xrtol(self):
        # test for #17345 to test xrtol parameter
        x0 = [1.3, 0.7, 0.8, 1.9, 1.2]
        res = optimize.minimize(optimize.rosen,
                                x0, method='bfgs', options={'xrtol': 1e-3})
        ref = optimize.minimize(optimize.rosen,
                                x0, method='bfgs', options={'gtol': 1e-3})
        assert res.nit != ref.nit
    def test_powell(self):
        # Powell (direction set) optimization routine
        if self.use_wrapper:
            opts = {'maxiter': self.maxiter, 'disp': self.disp,
                    'return_all': False}
            res = optimize.minimize(self.func, self.startparams, args=(),
                                    method='Powell', options=opts)
            params, fopt, direc, numiter, func_calls, warnflag = (
                res['x'], res['fun'], res['direc'], res['nit'],
                res['nfev'], res['status'])
        else:
            retval = optimize.fmin_powell(self.func, self.startparams,
                                          args=(), maxiter=self.maxiter,
                                          full_output=True, disp=self.disp,
                                          retall=False)
            (params, fopt, direc, numiter, func_calls, warnflag) = retval

        assert_allclose(self.func(params), self.func(self.solution),
                        atol=1e-6)
        # params[0] does not affect the objective function
        assert_allclose(params[1:], self.solution[1:], atol=5e-6)

        # Ensure that function call counts are 'known good'; these are from
        # SciPy 0.7.0. Don't allow them to increase.
        #
        # However, some leeway must be added: the exact evaluation
        # count is sensitive to numerical error, and floating-point
        # computations are not bit-for-bit reproducible across
        # machines, and when using e.g., MKL, data alignment
        # etc., affect the rounding error.
        #
        assert self.funccalls <= 116 + 20, self.funccalls
        assert self.gradcalls == 0, self.gradcalls

    @pytest.mark.xfail(reason="This part of test_powell fails on some "
                              "platforms, but the solution returned by "
                              "powell is still valid.")
    def test_powell_gh14014(self):
        # This part of test_powell started failing on some CI platforms;
        # see gh-14014. Since the solution is still correct and the comments
        # in test_powell suggest that small differences in the bits are known
        # to change the "trace" of the solution, seems safe to xfail to get CI
        # green now and investigate later.

        # Powell (direction set) optimization routine
        if self.use_wrapper:
            opts = {'maxiter': self.maxiter, 'disp': self.disp,
                    'return_all': False}
            res = optimize.minimize(self.func, self.startparams, args=(),
                                    method='Powell', options=opts)
            params, fopt, direc, numiter, func_calls, warnflag = (
                res['x'], res['fun'], res['direc'], res['nit'],
                res['nfev'], res['status'])
        else:
            retval = optimize.fmin_powell(self.func, self.startparams,
                                          args=(), maxiter=self.maxiter,
                                          full_output=True, disp=self.disp,
                                          retall=False)
            (params, fopt, direc, numiter, func_calls, warnflag) = retval

        # Ensure that the function behaves the same; this is from SciPy 0.7.0
        assert_allclose(self.trace[34:39],
                        [[0.72949016, -0.44156936, 0.47100962],
                         [0.72949016, -0.44156936, 0.48052496],
                         [1.45898031, -0.88313872, 0.95153458],
                         [0.72949016, -0.44156936, 0.47576729],
                         [1.72949016, -0.44156936, 0.47576729]],
                        atol=1e-14, rtol=1e-7)

    def test_powell_bounded(self):
        # Powell (direction set) optimization routine
        # same as test_powell above, but with bounds
        bounds = [(-np.pi, np.pi) for _ in self.startparams]
        if self.use_wrapper:
            opts = {'maxiter': self.maxiter, 'disp': self.disp,
                    'return_all': False}
            res = optimize.minimize(self.func, self.startparams, args=(),
                                    bounds=bounds,
                                    method='Powell', options=opts)
            params, fopt, direc, numiter, func_calls, warnflag = (
                res['x'], res['fun'], res['direc'], res['nit'],
                res['nfev'], res['status'])

            assert func_calls == self.funccalls
            assert_allclose(self.func(params), self.func(self.solution),
                            atol=1e-6, rtol=1e-5)

            # The exact evaluation count is sensitive to numerical error, and
            # floating-point computations are not bit-for-bit reproducible
            # across machines, and when using e.g. MKL, data alignment etc.
            # affect the rounding error.
            # It takes 155 calls on my machine, but we can add the same +20
            # margin as is used in `test_powell`
            assert self.funccalls <= 155 + 20
            assert self.gradcalls == 0
    def test_neldermead(self):
        # Nelder-Mead simplex algorithm
        if self.use_wrapper:
            opts = {'maxiter': self.maxiter, 'disp': self.disp,
                    'return_all': False}
            res = optimize.minimize(self.func, self.startparams, args=(),
                                    method='Nelder-mead', options=opts)
            params, fopt, numiter, func_calls, warnflag = (
                res['x'], res['fun'], res['nit'], res['nfev'],
                res['status'])
        else:
            retval = optimize.fmin(self.func, self.startparams,
                                   args=(), maxiter=self.maxiter,
                                   full_output=True, disp=self.disp,
                                   retall=False)
            (params, fopt, numiter, func_calls, warnflag) = retval

        assert_allclose(self.func(params), self.func(self.solution),
                        atol=1e-6)

        # Ensure that function call counts are 'known good'; these are from
        # SciPy 0.7.0. Don't allow them to increase.
        assert self.funccalls == 167, self.funccalls
        assert self.gradcalls == 0, self.gradcalls

        # Ensure that the function behaves the same; this is from SciPy 0.7.0
        assert_allclose(self.trace[76:78],
                        [[0.1928968, -0.62780447, 0.35166118],
                         [0.19572515, -0.63648426, 0.35838135]],
                        atol=1e-14, rtol=1e-7)

    def test_neldermead_initial_simplex(self):
        # Nelder-Mead simplex algorithm
        simplex = np.zeros((4, 3))
        simplex[...] = self.startparams
        for j in range(3):
            simplex[j+1, j] += 0.1

        if self.use_wrapper:
            opts = {'maxiter': self.maxiter, 'disp': False,
                    'return_all': True, 'initial_simplex': simplex}
            res = optimize.minimize(self.func, self.startparams, args=(),
                                    method='Nelder-mead', options=opts)
            params, fopt, numiter, func_calls, warnflag = (res['x'],
                                                           res['fun'],
                                                           res['nit'],
                                                           res['nfev'],
                                                           res['status'])
            assert_allclose(res['allvecs'][0], simplex[0])
        else:
            retval = optimize.fmin(self.func, self.startparams,
                                   args=(), maxiter=self.maxiter,
                                   full_output=True, disp=False, retall=False,
                                   initial_simplex=simplex)
            (params, fopt, numiter, func_calls, warnflag) = retval

        assert_allclose(self.func(params), self.func(self.solution),
                        atol=1e-6)

        # Ensure that function call counts are 'known good'; these are from
        # SciPy 0.17.0. Don't allow them to increase.
        assert self.funccalls == 100, self.funccalls
        assert self.gradcalls == 0, self.gradcalls

        # Ensure that the function behaves the same; this is from SciPy 0.15.0
        assert_allclose(self.trace[50:52],
                        [[0.14687474, -0.5103282, 0.48252111],
                         [0.14474003, -0.5282084, 0.48743951]],
                        atol=1e-14, rtol=1e-7)

    def test_neldermead_initial_simplex_bad(self):
        # Check it fails with bad simplices
        bad_simplices = []

        simplex = np.zeros((3, 2))
        simplex[...] = self.startparams[:2]
        for j in range(2):
            simplex[j+1, j] += 0.1
        bad_simplices.append(simplex)

        simplex = np.zeros((3, 3))
        bad_simplices.append(simplex)

        for simplex in bad_simplices:
            if self.use_wrapper:
                opts = {'maxiter': self.maxiter, 'disp': False,
                        'return_all': False, 'initial_simplex': simplex}
                assert_raises(ValueError,
                              optimize.minimize,
                              self.func,
                              self.startparams,
                              args=(),
                              method='Nelder-mead',
                              options=opts)
            else:
                assert_raises(ValueError, optimize.fmin,
                              self.func, self.startparams,
                              args=(), maxiter=self.maxiter,
                              full_output=True, disp=False, retall=False,
                              initial_simplex=simplex)
    def test_ncg_negative_maxiter(self):
        # Regression test for gh-8241
        opts = {'maxiter': -1}
        result = optimize.minimize(self.func, self.startparams,
                                   method='Newton-CG', jac=self.grad,
                                   args=(), options=opts)
        assert result.status == 1

    def test_ncg(self):
        # line-search Newton conjugate gradient optimization routine
        if self.use_wrapper:
            opts = {'maxiter': self.maxiter, 'disp': self.disp,
                    'return_all': False}
            retval = optimize.minimize(self.func, self.startparams,
                                       method='Newton-CG', jac=self.grad,
                                       args=(), options=opts)['x']
        else:
            retval = optimize.fmin_ncg(self.func, self.startparams, self.grad,
                                       args=(), maxiter=self.maxiter,
                                       full_output=False, disp=self.disp,
                                       retall=False)

        params = retval
        assert_allclose(self.func(params), self.func(self.solution),
                        atol=1e-6)

        # Ensure that function call counts are 'known good'; these are from
        # SciPy 0.7.0. Don't allow them to increase.
        assert self.funccalls == 7, self.funccalls
        assert self.gradcalls <= 22, self.gradcalls  # 0.13.0
        # assert self.gradcalls <= 18, self.gradcalls # 0.9.0
        # assert self.gradcalls == 18, self.gradcalls # 0.8.0
        # assert self.gradcalls == 22, self.gradcalls # 0.7.0

        # Ensure that the function behaves the same; this is from SciPy 0.7.0
        assert_allclose(self.trace[3:5],
                        [[-4.35700753e-07, -5.24869435e-01, 4.87527480e-01],
                         [-4.35700753e-07, -5.24869401e-01, 4.87527774e-01]],
                        atol=1e-6, rtol=1e-7)

    def test_ncg_hess(self):
        # Newton conjugate gradient with Hessian
        if self.use_wrapper:
            opts = {'maxiter': self.maxiter, 'disp': self.disp,
                    'return_all': False}
            retval = optimize.minimize(self.func, self.startparams,
                                       method='Newton-CG', jac=self.grad,
                                       hess=self.hess,
                                       args=(), options=opts)['x']
        else:
            retval = optimize.fmin_ncg(self.func, self.startparams, self.grad,
                                       fhess=self.hess,
                                       args=(), maxiter=self.maxiter,
                                       full_output=False, disp=self.disp,
                                       retall=False)

        params = retval
        assert_allclose(self.func(params), self.func(self.solution),
                        atol=1e-6)

        # Ensure that function call counts are 'known good'; these are from
        # SciPy 0.7.0. Don't allow them to increase.
        assert self.funccalls <= 7, self.funccalls  # gh10673
        assert self.gradcalls <= 18, self.gradcalls  # 0.9.0
        # assert self.gradcalls == 18, self.gradcalls # 0.8.0
        # assert self.gradcalls == 22, self.gradcalls # 0.7.0

        # Ensure that the function behaves the same; this is from SciPy 0.7.0
        assert_allclose(self.trace[3:5],
                        [[-4.35700753e-07, -5.24869435e-01, 4.87527480e-01],
                         [-4.35700753e-07, -5.24869401e-01, 4.87527774e-01]],
                        atol=1e-6, rtol=1e-7)

    def test_ncg_hessp(self):
        # Newton conjugate gradient with Hessian times a vector p.
        if self.use_wrapper:
            opts = {'maxiter': self.maxiter, 'disp': self.disp,
                    'return_all': False}
            retval = optimize.minimize(self.func, self.startparams,
                                       method='Newton-CG', jac=self.grad,
                                       hessp=self.hessp,
                                       args=(), options=opts)['x']
        else:
            retval = optimize.fmin_ncg(self.func, self.startparams, self.grad,
                                       fhess_p=self.hessp,
                                       args=(), maxiter=self.maxiter,
                                       full_output=False, disp=self.disp,
                                       retall=False)

        params = retval
        assert_allclose(self.func(params), self.func(self.solution),
                        atol=1e-6)

        # Ensure that function call counts are 'known good'; these are from
        # SciPy 0.7.0. Don't allow them to increase.
        assert self.funccalls <= 7, self.funccalls  # gh10673
        assert self.gradcalls <= 18, self.gradcalls  # 0.9.0
        # assert self.gradcalls == 18, self.gradcalls # 0.8.0
        # assert self.gradcalls == 22, self.gradcalls # 0.7.0

        # Ensure that the function behaves the same; this is from SciPy 0.7.0
        assert_allclose(self.trace[3:5],
                        [[-4.35700753e-07, -5.24869435e-01, 4.87527480e-01],
                         [-4.35700753e-07, -5.24869401e-01, 4.87527774e-01]],
                        atol=1e-6, rtol=1e-7)
def test_maxfev_test():
    rng = np.random.default_rng(271707100830272976862395227613146332411)

    def cost(x):
        return rng.random(1) * 1000  # never converging problem

    for imaxfev in [1, 10, 50]:
        # "TNC" and "L-BFGS-B" also support a maximum number of function
        # evaluations, but they may violate the limit because they evaluate
        # gradients by numerical differentiation. See the discussion in
        # PR #14805.
        for method in ['Powell', 'Nelder-Mead']:
            result = optimize.minimize(cost, rng.random(10),
                                       method=method,
                                       options={'maxfev': imaxfev})
            assert result["nfev"] == imaxfev


def test_wrap_scalar_function_with_validation():

    def func_(x):
        return x

    fcalls, func = optimize._optimize.\
        _wrap_scalar_function_maxfun_validation(func_, np.asarray(1), 5)

    for i in range(5):
        func(np.asarray(i))
        assert fcalls[0] == i+1

    msg = "Too many function calls"
    with assert_raises(optimize._optimize._MaxFuncCallError, match=msg):
        func(np.asarray(i))  # exceeded maximum function call

    fcalls, func = optimize._optimize.\
        _wrap_scalar_function_maxfun_validation(func_, np.asarray(1), 5)

    msg = "The user-provided objective function must return a scalar value."
    with assert_raises(ValueError, match=msg):
        func(np.array([1, 1]))


def test_obj_func_returns_scalar():
    match = ("The user-provided "
             "objective function must "
             "return a scalar value.")
    with assert_raises(ValueError, match=match):
        optimize.minimize(lambda x: x, np.array([1, 1]), method='BFGS')


def test_neldermead_iteration_num():
    x0 = np.array([1.3, 0.7, 0.8, 1.9, 1.2])
    res = optimize._minimize._minimize_neldermead(optimize.rosen, x0,
                                                  xatol=1e-8)
    assert res.nit <= 339


def test_neldermead_xatol_fatol():
    # gh4484
    # test we can call with fatol, xatol specified
    func = lambda x: x[0]**2 + x[1]**2

    optimize._minimize._minimize_neldermead(func, [1, 1], maxiter=2,
                                            xatol=1e-3, fatol=1e-3)


def test_neldermead_adaptive():
    func = lambda x: np.sum(x**2)
    p0 = [0.15746215, 0.48087031, 0.44519198, 0.4223638, 0.61505159,
          0.32308456, 0.9692297, 0.4471682, 0.77411992, 0.80441652,
          0.35994957, 0.75487856, 0.99973421, 0.65063887, 0.09626474]

    res = optimize.minimize(func, p0, method='Nelder-Mead')
    assert_equal(res.success, False)

    res = optimize.minimize(func, p0, method='Nelder-Mead',
                            options={'adaptive': True})
    assert_equal(res.success, True)
def test_bounded_powell_outsidebounds():
    # With the bounded Powell method, if you start outside the bounds, the
    # final solution should still be within the bounds (provided that the
    # user doesn't make a bad choice for the `direc` argument).
    func = lambda x: np.sum(x**2)
    bounds = (-1, 1), (-1, 1), (-1, 1)
    x0 = [-4, .5, -.8]

    # we're starting outside the bounds, so we should get a warning
    with assert_warns(optimize.OptimizeWarning):
        res = optimize.minimize(func, x0, bounds=bounds, method="Powell")
    assert_allclose(res.x, np.array([0.] * len(x0)), atol=1e-6)
    assert_equal(res.success, True)
    assert_equal(res.status, 0)

    # However, now if we change the `direc` argument such that the
    # set of vectors does not span the parameter space, then we may
    # not end up back within the bounds. Here we see that the first
    # parameter cannot be updated!
    direc = [[0, 0, 0], [0, 1, 0], [0, 0, 1]]
    # we're starting outside the bounds, so we should get a warning
    with assert_warns(optimize.OptimizeWarning):
        res = optimize.minimize(func, x0,
                                bounds=bounds, method="Powell",
                                options={'direc': direc})
    assert_allclose(res.x, np.array([-4., 0, 0]), atol=1e-6)
    assert_equal(res.success, False)
    assert_equal(res.status, 4)
def test_bounded_powell_vs_powell():
    # here we test an example where the bounded Powell method
    # will return a different result than the standard Powell
    # method.

    # first we test a simple example where the minimum is at
    # the origin and the minimum that is within the bounds is
    # larger than the minimum at the origin.
    func = lambda x: np.sum(x**2)
    bounds = (-5, -1), (-10, -0.1), (1, 9.2), (-4, 7.6), (-15.9, -2)
    x0 = [-2.1, -5.2, 1.9, 0, -2]

    options = {'ftol': 1e-10, 'xtol': 1e-10}

    res_powell = optimize.minimize(func, x0, method="Powell", options=options)
    assert_allclose(res_powell.x, 0., atol=1e-6)
    assert_allclose(res_powell.fun, 0., atol=1e-6)

    res_bounded_powell = optimize.minimize(func, x0, options=options,
                                           bounds=bounds,
                                           method="Powell")
    p = np.array([-1, -0.1, 1, 0, -2])
    assert_allclose(res_bounded_powell.x, p, atol=1e-6)
    assert_allclose(res_bounded_powell.fun, func(p), atol=1e-6)

    # now we test bounded Powell but with a mix of inf bounds.
    bounds = (None, -1), (-np.inf, -.1), (1, np.inf), (-4, None), (-15.9, -2)
    res_bounded_powell = optimize.minimize(func, x0, options=options,
                                           bounds=bounds,
                                           method="Powell")
    p = np.array([-1, -0.1, 1, 0, -2])
    assert_allclose(res_bounded_powell.x, p, atol=1e-6)
    assert_allclose(res_bounded_powell.fun, func(p), atol=1e-6)

    # next we test an example where the global minimum is within
    # the bounds, but the bounded Powell method performs better
    # than the standard Powell method.
    def func(x):
        t = np.sin(-x[0]) * np.cos(x[1]) * np.sin(-x[0] * x[1]) * np.cos(x[1])
        t -= np.cos(np.sin(x[1] * x[2]) * np.cos(x[2]))
        return t**2

    bounds = [(-2, 5)] * 3
    x0 = [-0.5, -0.5, -0.5]

    res_powell = optimize.minimize(func, x0, method="Powell")
    res_bounded_powell = optimize.minimize(func, x0,
                                           bounds=bounds,
                                           method="Powell")
    assert_allclose(res_powell.fun, 0.007136253919761627, atol=1e-6)
    assert_allclose(res_bounded_powell.fun, 0, atol=1e-6)

    # next we test the previous example where we provide Powell
    # with (-inf, inf) bounds, and compare it to providing Powell
    # with no bounds. They should end up the same.
    bounds = [(-np.inf, np.inf)] * 3

    res_bounded_powell = optimize.minimize(func, x0,
                                           bounds=bounds,
                                           method="Powell")
    assert_allclose(res_powell.fun, res_bounded_powell.fun, atol=1e-6)
    assert_allclose(res_powell.nfev, res_bounded_powell.nfev, atol=1e-6)
    assert_allclose(res_powell.x, res_bounded_powell.x, atol=1e-6)

    # now test when x0 starts outside of the bounds.
    x0 = [45.46254415, -26.52351498, 31.74830248]
    bounds = [(-2, 5)] * 3
    # we're starting outside the bounds, so we should get a warning
    with assert_warns(optimize.OptimizeWarning):
        res_bounded_powell = optimize.minimize(func, x0,
                                               bounds=bounds,
                                               method="Powell")
    assert_allclose(res_bounded_powell.fun, 0, atol=1e-6)
def test_onesided_bounded_powell_stability():
    # When the Powell method is bounded on only one side, a
    # np.tan transform is done in order to convert it into a
    # completely bounded problem. Here we do some simple tests
    # of one-sided bounded Powell where the optimal solutions
    # are large to test the stability of the transformation.
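    # (For intuition: tan maps the open interval (-pi/2, pi/2) onto the whole
    # real line, so an unbounded direction can be searched over a bounded
    # transformed variable; optima with very large magnitude then fall in the
    # steep part of such a transform, which is what the cases below stress.
    # This is only a schematic picture of that kind of transform, not
    # necessarily the exact mapping used internally.)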
    kwargs = {'method': 'Powell',
              'bounds': [(-np.inf, 1e6)] * 3,
              'options': {'ftol': 1e-8, 'xtol': 1e-8}}
    x0 = [1, 1, 1]

    # df/dx is constant.
    f = lambda x: -np.sum(x)
    res = optimize.minimize(f, x0, **kwargs)
    assert_allclose(res.fun, -3e6, atol=1e-4)

    # df/dx gets smaller and smaller.
    def f(x):
        return -np.abs(np.sum(x)) ** (0.1) * (1 if np.all(x > 0) else -1)

    res = optimize.minimize(f, x0, **kwargs)
    assert_allclose(res.fun, -(3e6) ** (0.1))

    # df/dx gets larger and larger.
    def f(x):
        return -np.abs(np.sum(x)) ** 10 * (1 if np.all(x > 0) else -1)

    res = optimize.minimize(f, x0, **kwargs)
    assert_allclose(res.fun, -(3e6) ** 10, rtol=1e-7)

    # df/dx gets larger for some of the variables and smaller for others.
    def f(x):
        t = -np.abs(np.sum(x[:2])) ** 5 - np.abs(np.sum(x[2:])) ** (0.1)
        t *= (1 if np.all(x > 0) else -1)
        return t

    kwargs['bounds'] = [(-np.inf, 1e3)] * 3
    res = optimize.minimize(f, x0, **kwargs)
    assert_allclose(res.fun, -(2e3) ** 5 - (1e6) ** (0.1), rtol=1e-7)
class TestOptimizeWrapperDisp(CheckOptimizeParameterized):
    use_wrapper = True
    disp = True


class TestOptimizeWrapperNoDisp(CheckOptimizeParameterized):
    use_wrapper = True
    disp = False


class TestOptimizeNoWrapperDisp(CheckOptimizeParameterized):
    use_wrapper = False
    disp = True


class TestOptimizeNoWrapperNoDisp(CheckOptimizeParameterized):
    use_wrapper = False
    disp = False
class TestOptimizeSimple(CheckOptimize):

    def test_bfgs_nan(self):
        # Test corner case where nan is fed to optimizer. See gh-2067.
        func = lambda x: x
        fprime = lambda x: np.ones_like(x)
        x0 = [np.nan]
        with np.errstate(over='ignore', invalid='ignore'):
            x = optimize.fmin_bfgs(func, x0, fprime, disp=False)
            assert np.isnan(func(x))

    def test_bfgs_nan_return(self):
        # Test corner cases where fun returns NaN. See gh-4793.

        # First case: NaN from first call.
        func = lambda x: np.nan
        with np.errstate(invalid='ignore'):
            result = optimize.minimize(func, 0)

        assert np.isnan(result['fun'])
        assert result['success'] is False

        # Second case: NaN from second call.
        func = lambda x: 0 if x == 0 else np.nan
        fprime = lambda x: np.ones_like(x)  # Steer away from zero.
        with np.errstate(invalid='ignore'):
            result = optimize.minimize(func, 0, jac=fprime)

        assert np.isnan(result['fun'])
        assert result['success'] is False

    def test_bfgs_numerical_jacobian(self):
        # BFGS with numerical Jacobian and a vector epsilon parameter.
        # define the epsilon parameter using a random vector
        epsilon = np.sqrt(np.spacing(1.)) * np.random.rand(len(self.solution))

        params = optimize.fmin_bfgs(self.func, self.startparams,
                                    epsilon=epsilon, args=(),
                                    maxiter=self.maxiter, disp=False)

        assert_allclose(self.func(params), self.func(self.solution),
                        atol=1e-6)

    def test_finite_differences_jac(self):
        methods = ['BFGS', 'CG', 'TNC']
        jacs = ['2-point', '3-point', None]
        for method, jac in itertools.product(methods, jacs):
            result = optimize.minimize(self.func, self.startparams,
                                       method=method, jac=jac)
            assert_allclose(self.func(result.x), self.func(self.solution),
                            atol=1e-6)

    def test_finite_differences_hess(self):
        # test that all the methods that require hess can use finite
        # differences.
        # For Newton-CG, trust-ncg, trust-krylov the FD estimated hessian is
        # wrapped in a hessp function.
        # dogleg, trust-exact actually require true hessians at the moment,
        # so they're excluded.
        methods = ['trust-constr', 'Newton-CG', 'trust-ncg', 'trust-krylov']
        hesses = FD_METHODS + (optimize.BFGS,)
        for method, hess in itertools.product(methods, hesses):
            if hess is optimize.BFGS:
                hess = hess()
            result = optimize.minimize(self.func, self.startparams,
                                       method=method, jac=self.grad,
                                       hess=hess)
            assert result.success

        # check that the methods demand some sort of Hessian specification
        # Newton-CG creates its own hessp, and trust-constr doesn't need a
        # hess specified either
        methods = ['trust-ncg', 'trust-krylov', 'dogleg', 'trust-exact']
        for method in methods:
            with pytest.raises(ValueError):
                optimize.minimize(self.func, self.startparams,
                                  method=method, jac=self.grad,
                                  hess=None)

    def test_bfgs_gh_2169(self):
        def f(x):
            if x < 0:
                return 1.79769313e+308
            else:
                return x + 1./x
        xs = optimize.fmin_bfgs(f, [10.], disp=False)
        assert_allclose(xs, 1.0, rtol=1e-4, atol=1e-4)
    def test_bfgs_double_evaluations(self):
        # check BFGS does not evaluate twice in a row at same point
        def f(x):
            xp = x[0]
            assert xp not in seen
            seen.add(xp)
            return 10*x**2, 20*x

        seen = set()
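        # jac=True tells minimize that ``f`` returns (objective, gradient) as
        # a single tuple, so each evaluation supplies both values at once.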
        optimize.minimize(f, -100, method='bfgs', jac=True, tol=1e-7)

    def test_l_bfgs_b(self):
        # limited-memory bound-constrained BFGS algorithm
        retval = optimize.fmin_l_bfgs_b(self.func, self.startparams,
                                        self.grad, args=(),
                                        maxiter=self.maxiter)

        (params, fopt, d) = retval

        assert_allclose(self.func(params), self.func(self.solution),
                        atol=1e-6)

        # Ensure that function call counts are 'known good'; these are from
        # SciPy 0.7.0. Don't allow them to increase.
        assert self.funccalls == 7, self.funccalls
        assert self.gradcalls == 5, self.gradcalls

        # Ensure that the function behaves the same; this is from SciPy 0.7.0
        # test fixed in gh10673
        assert_allclose(self.trace[3:5],
                        [[8.117083e-16, -5.196198e-01, 4.897617e-01],
                         [0., -0.52489628, 0.48753042]],
                        atol=1e-14, rtol=1e-7)

    def test_l_bfgs_b_numjac(self):
        # L-BFGS-B with numerical Jacobian
        retval = optimize.fmin_l_bfgs_b(self.func, self.startparams,
                                        approx_grad=True,
                                        maxiter=self.maxiter)

        (params, fopt, d) = retval

        assert_allclose(self.func(params), self.func(self.solution),
                        atol=1e-6)

    def test_l_bfgs_b_funjac(self):
        # L-BFGS-B with combined objective function and Jacobian
        def fun(x):
            return self.func(x), self.grad(x)

        retval = optimize.fmin_l_bfgs_b(fun, self.startparams,
                                        maxiter=self.maxiter)

        (params, fopt, d) = retval

        assert_allclose(self.func(params), self.func(self.solution),
                        atol=1e-6)

    def test_l_bfgs_b_maxiter(self):
        # gh7854
        # Ensure that not more than maxiters are ever run.
        class Callback:
            def __init__(self):
                self.nit = 0
                self.fun = None
                self.x = None

            def __call__(self, x):
                self.x = x
                self.fun = optimize.rosen(x)
                self.nit += 1

        c = Callback()
        res = optimize.minimize(optimize.rosen, [0., 0.], method='l-bfgs-b',
                                callback=c, options={'maxiter': 5})

        assert_equal(res.nit, 5)
        assert_almost_equal(res.x, c.x)
        assert_almost_equal(res.fun, c.fun)
        assert_equal(res.status, 1)
        assert res.success is False
        assert_equal(res.message,
                     'STOP: TOTAL NO. of ITERATIONS REACHED LIMIT')
    def test_minimize_l_bfgs_b(self):
        # Minimize with L-BFGS-B method
        opts = {'disp': False, 'maxiter': self.maxiter}
        r = optimize.minimize(self.func, self.startparams,
                              method='L-BFGS-B', jac=self.grad,
                              options=opts)
        assert_allclose(self.func(r.x), self.func(self.solution),
                        atol=1e-6)
        assert self.gradcalls == r.njev

        self.funccalls = self.gradcalls = 0
        # approximate jacobian
        ra = optimize.minimize(self.func, self.startparams,
                               method='L-BFGS-B', options=opts)
        # check that function evaluations in approximate jacobian are counted
        # assert_(ra.nfev > r.nfev)
        assert self.funccalls == ra.nfev
        assert_allclose(self.func(ra.x), self.func(self.solution),
                        atol=1e-6)

        self.funccalls = self.gradcalls = 0
        # approximate jacobian
        ra = optimize.minimize(self.func, self.startparams, jac='3-point',
                               method='L-BFGS-B', options=opts)
        assert self.funccalls == ra.nfev
        assert_allclose(self.func(ra.x), self.func(self.solution),
                        atol=1e-6)

    def test_minimize_l_bfgs_b_ftol(self):
        # Check that the `ftol` parameter in l_bfgs_b works as expected
        v0 = None
        for tol in [1e-1, 1e-4, 1e-7, 1e-10]:
            opts = {'disp': False, 'maxiter': self.maxiter, 'ftol': tol}
            sol = optimize.minimize(self.func, self.startparams,
                                    method='L-BFGS-B', jac=self.grad,
                                    options=opts)
            v = self.func(sol.x)

            if v0 is None:
                v0 = v
            else:
                assert v < v0

            assert_allclose(v, self.func(self.solution), rtol=tol)

    def test_minimize_l_bfgs_maxls(self):
        # check that the maxls is passed down to the Fortran routine
        sol = optimize.minimize(optimize.rosen, np.array([-1.2, 1.0]),
                                method='L-BFGS-B', jac=optimize.rosen_der,
                                options={'disp': False, 'maxls': 1})
        assert not sol.success

    def test_minimize_l_bfgs_b_maxfun_interruption(self):
        # gh-6162
        f = optimize.rosen
        g = optimize.rosen_der
        values = []
        x0 = np.full(7, 1000)

        def objfun(x):
            value = f(x)
            values.append(value)
            return value

        # Look for an interesting test case.
        # Request a maxfun that stops at a particularly bad function
        # evaluation somewhere between 100 and 300 evaluations.
        low, medium, high = 30, 100, 300
        optimize.fmin_l_bfgs_b(objfun, x0, fprime=g, maxfun=high)
        v, k = max((y, i) for i, y in enumerate(values[medium:]))
        maxfun = medium + k
        # If the minimization strategy is reasonable,
        # the minimize() result should not be worse than the best
        # of the first 30 function evaluations.
        target = min(values[:low])
        xmin, fmin, d = optimize.fmin_l_bfgs_b(f, x0, fprime=g, maxfun=maxfun)
        assert_array_less(fmin, target)
    def test_custom(self):
        # This function comes from the documentation example.
        def custmin(fun, x0, args=(), maxfev=None, stepsize=0.1,
                    maxiter=100, callback=None, **options):
            bestx = x0
            besty = fun(x0)
            funcalls = 1
            niter = 0
            improved = True
            stop = False

            while improved and not stop and niter < maxiter:
                improved = False
                niter += 1
                for dim in range(np.size(x0)):
                    for s in [bestx[dim] - stepsize, bestx[dim] + stepsize]:
                        testx = np.copy(bestx)
                        testx[dim] = s
                        testy = fun(testx, *args)
                        funcalls += 1
                        if testy < besty:
                            besty = testy
                            bestx = testx
                            improved = True
                    if callback is not None:
                        callback(bestx)
                    if maxfev is not None and funcalls >= maxfev:
                        stop = True
                        break

            return optimize.OptimizeResult(fun=besty, x=bestx, nit=niter,
                                           nfev=funcalls,
                                           success=(niter > 1))

        x0 = [1.35, 0.9, 0.8, 1.1, 1.2]
        res = optimize.minimize(optimize.rosen, x0, method=custmin,
                                options=dict(stepsize=0.05))
        assert_allclose(res.x, 1.0, rtol=1e-4, atol=1e-4)

    @pytest.mark.xfail(reason="output not reliable on all platforms")
    def test_gh13321(self, capfd):
        # gh-13321 reported issues with console output in fmin_l_bfgs_b;
        # check that iprint=0 works.
        kwargs = {'func': optimize.rosen, 'x0': [4, 3],
                  'fprime': optimize.rosen_der, 'bounds': ((3, 5), (3, 5))}

        # "L-BFGS-B" is always in output; should show when iprint >= 0
        # "At iterate" is iterate info; should show when iprint >= 1
        optimize.fmin_l_bfgs_b(**kwargs, iprint=-1)
        out, _ = capfd.readouterr()
        assert "L-BFGS-B" not in out and "At iterate" not in out

        optimize.fmin_l_bfgs_b(**kwargs, iprint=0)
        out, _ = capfd.readouterr()
        assert "L-BFGS-B" in out and "At iterate" not in out

        optimize.fmin_l_bfgs_b(**kwargs, iprint=1)
        out, _ = capfd.readouterr()
        assert "L-BFGS-B" in out and "At iterate" in out

        # `disp is not None` overrides `iprint` behavior
        # `disp=0` should suppress all output
        # `disp=1` should be the same as `iprint = 1`
        optimize.fmin_l_bfgs_b(**kwargs, iprint=1, disp=False)
        out, _ = capfd.readouterr()
        assert "L-BFGS-B" not in out and "At iterate" not in out

        optimize.fmin_l_bfgs_b(**kwargs, iprint=-1, disp=True)
        out, _ = capfd.readouterr()
        assert "L-BFGS-B" in out and "At iterate" in out

    def test_gh10771(self):
        # check that minimize passes bounds and constraints to a custom
        # minimizer without altering them.
        bounds = [(-2, 2), (0, 3)]
        constraints = 'constraints'

        def custmin(fun, x0, **options):
            assert options['bounds'] is bounds
            assert options['constraints'] is constraints
            return optimize.OptimizeResult()

        x0 = [1, 1]
        optimize.minimize(optimize.rosen, x0, method=custmin,
                          bounds=bounds, constraints=constraints)
    def test_minimize_tol_parameter(self):
        # Check that the minimize() tol= argument does something
        def func(z):
            x, y = z
            return x**2*y**2 + x**4 + 1

        def dfunc(z):
            x, y = z
            return np.array([2*x*y**2 + 4*x**3, 2*x**2*y])

        for method in ['nelder-mead', 'powell', 'cg', 'bfgs',
                       'newton-cg', 'l-bfgs-b', 'tnc',
                       'cobyla', 'slsqp']:
            if method in ('nelder-mead', 'powell', 'cobyla'):
                jac = None
            else:
                jac = dfunc

            sol1 = optimize.minimize(func, [1, 1], jac=jac, tol=1e-10,
                                     method=method)
            sol2 = optimize.minimize(func, [1, 1], jac=jac, tol=1.0,
                                     method=method)
            assert func(sol1.x) < func(sol2.x), \
                "%s: %s vs. %s" % (method, func(sol1.x), func(sol2.x))

    @pytest.mark.parametrize('method',
                             ['fmin', 'fmin_powell', 'fmin_cg', 'fmin_bfgs',
                              'fmin_ncg', 'fmin_l_bfgs_b', 'fmin_tnc',
                              'fmin_slsqp'] + MINIMIZE_METHODS)
    def test_minimize_callback_copies_array(self, method):
        # Check that arrays passed to callbacks are not modified
        # inplace by the optimizer afterward
        if method in ('fmin_tnc', 'fmin_l_bfgs_b'):
            func = lambda x: (optimize.rosen(x), optimize.rosen_der(x))
        else:
            func = optimize.rosen
        jac = optimize.rosen_der
        hess = optimize.rosen_hess

        x0 = np.zeros(10)

        # Set options
        kwargs = {}
        if method.startswith('fmin'):
            routine = getattr(optimize, method)
            if method == 'fmin_slsqp':
                kwargs['iter'] = 5
            elif method == 'fmin_tnc':
                kwargs['maxfun'] = 100
            elif method in ('fmin', 'fmin_powell'):
                kwargs['maxiter'] = 3500
            else:
                kwargs['maxiter'] = 5
        else:
            def routine(*a, **kw):
                kw['method'] = method
                return optimize.minimize(*a, **kw)

            if method == 'tnc':
                kwargs['options'] = dict(maxfun=100)
            else:
                kwargs['options'] = dict(maxiter=5)

        if method in ('fmin_ncg',):
            kwargs['fprime'] = jac
        elif method in ('newton-cg',):
            kwargs['jac'] = jac
        elif method in ('trust-krylov', 'trust-exact', 'trust-ncg', 'dogleg',
                        'trust-constr'):
            kwargs['jac'] = jac
            kwargs['hess'] = hess

        # Run with callback
        results = []

        def callback(x, *args, **kwargs):
            results.append((x, np.copy(x)))

        routine(func, x0, callback=callback, **kwargs)

        # Check returned arrays coincide with their copies
        # and have no memory overlap
        assert len(results) > 2
        assert all(np.all(x == y) for x, y in results)
        assert not any(np.may_share_memory(x[0], y[0])
                       for x, y in itertools.combinations(results, 2))
    @pytest.mark.parametrize('method', ['nelder-mead', 'powell', 'cg',
                                        'bfgs', 'newton-cg', 'l-bfgs-b',
                                        'tnc', 'cobyla', 'slsqp'])
    def test_no_increase(self, method):
        # Check that the solver doesn't return a value worse than the
        # initial point.
        def func(x):
            return (x - 1)**2

        def bad_grad(x):
            # purposefully invalid gradient function, simulates a case
            # where line searches start failing
            return 2*(x - 1) * (-1) - 2

        x0 = np.array([2.0])
        f0 = func(x0)
        jac = bad_grad
        options = dict(maxfun=20) if method == 'tnc' else dict(maxiter=20)
        if method in ['nelder-mead', 'powell', 'cobyla']:
            jac = None
        sol = optimize.minimize(func, x0, jac=jac, method=method,
                                options=options)
        assert_equal(func(sol.x), sol.fun)

        if method == 'slsqp':
            pytest.xfail("SLSQP returns slightly worse")
        assert func(sol.x) <= f0

    def test_slsqp_respect_bounds(self):
        # Regression test for gh-3108
        def f(x):
            return sum((x - np.array([1., 2., 3., 4.]))**2)

        def cons(x):
            a = np.array([[-1, -1, -1, -1], [-3, -3, -2, -1]])
            return np.concatenate([np.dot(a, x) + np.array([5, 10]), x])

        x0 = np.array([0.5, 1., 1.5, 2.])
        res = optimize.minimize(f, x0, method='slsqp',
                                constraints={'type': 'ineq', 'fun': cons})
        assert_allclose(res.x, np.array([0., 2, 5, 8])/3, atol=1e-12)

    @pytest.mark.parametrize('method', ['Nelder-Mead', 'Powell', 'CG', 'BFGS',
                                        'Newton-CG', 'L-BFGS-B', 'SLSQP',
                                        'trust-constr', 'dogleg', 'trust-ncg',
                                        'trust-exact', 'trust-krylov'])
  1040. def test_respect_maxiter(self, method):
  1041. # Check that the number of iterations equals max_iter, assuming
  1042. # convergence doesn't establish before
  1043. MAXITER = 4
  1044. x0 = np.zeros(10)
  1045. sf = ScalarFunction(optimize.rosen, x0, (), optimize.rosen_der,
  1046. optimize.rosen_hess, None, None)
  1047. # Set options
  1048. kwargs = {'method': method, 'options': dict(maxiter=MAXITER)}
  1049. if method in ('Newton-CG',):
  1050. kwargs['jac'] = sf.grad
  1051. elif method in ('trust-krylov', 'trust-exact', 'trust-ncg', 'dogleg',
  1052. 'trust-constr'):
  1053. kwargs['jac'] = sf.grad
  1054. kwargs['hess'] = sf.hess
  1055. sol = optimize.minimize(sf.fun, x0, **kwargs)
  1056. assert sol.nit == MAXITER
  1057. assert sol.nfev >= sf.nfev
  1058. if hasattr(sol, 'njev'):
  1059. assert sol.njev >= sf.ngev
  1060. # method specific tests
  1061. if method == 'SLSQP':
  1062. assert sol.status == 9 # Iteration limit reached
    @pytest.mark.parametrize('method', ['Nelder-Mead', 'Powell',
                                        'fmin', 'fmin_powell'])
    def test_runtime_warning(self, method):
        x0 = np.zeros(10)
        sf = ScalarFunction(optimize.rosen, x0, (), optimize.rosen_der,
                            optimize.rosen_hess, None, None)
        options = {"maxiter": 1, "disp": True}
        with pytest.warns(RuntimeWarning,
                          match=r'Maximum number of iterations'):
            if method.startswith('fmin'):
                routine = getattr(optimize, method)
                routine(sf.fun, x0, **options)
            else:
                optimize.minimize(sf.fun, x0, method=method, options=options)
    def test_respect_maxiter_trust_constr_ineq_constraints(self):
        # special case of minimization with trust-constr and inequality
        # constraints to check maxiter limit is obeyed when using internal
        # method 'tr_interior_point'
        MAXITER = 4
        f = optimize.rosen
        jac = optimize.rosen_der
        hess = optimize.rosen_hess

        fun = lambda x: np.array([0.2 * x[0] - 0.4 * x[1] - 0.33 * x[2]])
        cons = ({'type': 'ineq',
                 'fun': fun},)

        x0 = np.zeros(10)
        sol = optimize.minimize(f, x0, constraints=cons, jac=jac, hess=hess,
                                method='trust-constr',
                                options=dict(maxiter=MAXITER))
        assert sol.nit == MAXITER
    def test_minimize_automethod(self):
        def f(x):
            return x**2

        def cons(x):
            return x - 2

        x0 = np.array([10.])
        sol_0 = optimize.minimize(f, x0)
        sol_1 = optimize.minimize(f, x0, constraints=[{'type': 'ineq',
                                                       'fun': cons}])
        sol_2 = optimize.minimize(f, x0, bounds=[(5, 10)])
        sol_3 = optimize.minimize(f, x0,
                                  constraints=[{'type': 'ineq', 'fun': cons}],
                                  bounds=[(5, 10)])
        sol_4 = optimize.minimize(f, x0,
                                  constraints=[{'type': 'ineq', 'fun': cons}],
                                  bounds=[(1, 10)])
        for sol in [sol_0, sol_1, sol_2, sol_3, sol_4]:
            assert sol.success
        assert_allclose(sol_0.x, 0, atol=1e-7)
        assert_allclose(sol_1.x, 2, atol=1e-7)
        assert_allclose(sol_2.x, 5, atol=1e-7)
        assert_allclose(sol_3.x, 5, atol=1e-7)
        assert_allclose(sol_4.x, 2, atol=1e-7)
    def test_minimize_coerce_args_param(self):
        # Regression test for gh-3503
        def Y(x, c):
            return np.sum((x - c)**2)

        def dY_dx(x, c=None):
            return 2*(x - c)

        c = np.array([3, 1, 4, 1, 5, 9, 2, 6, 5, 3, 5])
        xinit = np.random.randn(len(c))
        optimize.minimize(Y, xinit, jac=dY_dx, args=(c), method="BFGS")
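
    # Added illustrative sketch (not part of the original suite): `args=(c)`
    # above is not a tuple -- the parentheses are redundant -- so `minimize`
    # is expected to coerce the bare array into a one-element tuple before
    # forwarding it to the objective. The hypothetical companion check below
    # just asserts that the bare argument and a proper tuple behave the same.
    def test_minimize_coerce_args_param_tuple_equivalent(self):
        def Y(x, c):
            return np.sum((x - c)**2)

        c = np.array([3., 1., 4.])
        x0 = np.zeros_like(c)
        res_bare = optimize.minimize(Y, x0, args=(c), method="BFGS")
        res_tuple = optimize.minimize(Y, x0, args=(c,), method="BFGS")
        assert_allclose(res_bare.x, res_tuple.x, atol=1e-6)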
    def test_initial_step_scaling(self):
        # Check that optimizer initial step is not huge even if the
        # function and gradients are
        scales = [1e-50, 1, 1e50]
        methods = ['CG', 'BFGS', 'L-BFGS-B', 'Newton-CG']

        def f(x):
            if first_step_size[0] is None and x[0] != x0[0]:
                first_step_size[0] = abs(x[0] - x0[0])
            if abs(x).max() > 1e4:
                raise AssertionError("Optimization stepped far away!")
            return scale*(x[0] - 1)**2

        def g(x):
            return np.array([scale*(x[0] - 1)])

        for scale, method in itertools.product(scales, methods):
            if method in ('CG', 'BFGS'):
                options = dict(gtol=scale*1e-8)
            else:
                options = dict()

            if scale < 1e-10 and method in ('L-BFGS-B', 'Newton-CG'):
                # XXX: return initial point if they see small gradient
                continue

            x0 = [-1.0]
            first_step_size = [None]
            res = optimize.minimize(f, x0, jac=g, method=method,
                                    options=options)

            err_msg = "{0} {1}: {2}: {3}".format(method, scale,
                                                 first_step_size, res)

            assert res.success, err_msg
            assert_allclose(res.x, [1.0], err_msg=err_msg)
            assert res.nit <= 3, err_msg

            if scale > 1e-10:
                if method in ('CG', 'BFGS'):
                    assert_allclose(first_step_size[0], 1.01,
                                    err_msg=err_msg)
                else:
                    # Newton-CG and L-BFGS-B use different logic for the first
                    # step, but are both scaling invariant with step sizes ~ 1
                    assert 0.5 < first_step_size[0] < 3, err_msg
            else:
                # step size has upper bound of ||grad||, so line
                # search makes many small steps
                pass
    @pytest.mark.parametrize('method', ['nelder-mead', 'powell', 'cg', 'bfgs',
                                        'newton-cg', 'l-bfgs-b', 'tnc',
                                        'cobyla', 'slsqp', 'trust-constr',
                                        'dogleg', 'trust-ncg', 'trust-exact',
                                        'trust-krylov'])
    def test_nan_values(self, method):
        # Check that NaN function values result in a failed exit status
        np.random.seed(1234)

        count = [0]

        def func(x):
            return np.nan

        def func2(x):
            count[0] += 1
            if count[0] > 2:
                return np.nan
            else:
                return np.random.rand()

        def grad(x):
            return np.array([1.0])

        def hess(x):
            return np.array([[1.0]])

        x0 = np.array([1.0])

        needs_grad = method in ('newton-cg', 'trust-krylov', 'trust-exact',
                                'trust-ncg', 'dogleg')
        needs_hess = method in ('trust-krylov', 'trust-exact', 'trust-ncg',
                                'dogleg')

        funcs = [func, func2]
        grads = [grad] if needs_grad else [grad, None]
        hesss = [hess] if needs_hess else [hess, None]
        options = dict(maxfun=20) if method == 'tnc' else dict(maxiter=20)

        with np.errstate(invalid='ignore'), suppress_warnings() as sup:
            sup.filter(UserWarning, "delta_grad == 0.*")
            sup.filter(RuntimeWarning, ".*does not use Hessian.*")
            sup.filter(RuntimeWarning, ".*does not use gradient.*")

            for f, g, h in itertools.product(funcs, grads, hesss):
                count = [0]
                sol = optimize.minimize(f, x0, jac=g, hess=h, method=method,
                                        options=options)
                assert_equal(sol.success, False)
    @pytest.mark.parametrize('method', ['nelder-mead', 'cg', 'bfgs',
                                        'l-bfgs-b', 'tnc',
                                        'cobyla', 'slsqp', 'trust-constr',
                                        'dogleg', 'trust-ncg', 'trust-exact',
                                        'trust-krylov'])
    def test_duplicate_evaluations(self, method):
        # check that there are no duplicate evaluations for any methods
        jac = hess = None
        if method in ('newton-cg', 'trust-krylov', 'trust-exact',
                      'trust-ncg', 'dogleg'):
            jac = self.grad
        if method in ('trust-krylov', 'trust-exact', 'trust-ncg',
                      'dogleg'):
            hess = self.hess

        with np.errstate(invalid='ignore'), suppress_warnings() as sup:
            # for trust-constr
            sup.filter(UserWarning, "delta_grad == 0.*")
            optimize.minimize(self.func, self.startparams,
                              method=method, jac=jac, hess=hess)

        for i in range(1, len(self.trace)):
            if np.array_equal(self.trace[i - 1], self.trace[i]):
                raise RuntimeError(
                    "Duplicate evaluations made by {}".format(method))
@pytest.mark.parametrize(
    'method',
    ['l-bfgs-b', 'tnc', 'Powell', 'Nelder-Mead']
)
def test_minimize_with_scalar(method):
    # checks that minimize works with a scalar being provided to it.
    def f(x):
        return np.sum(x ** 2)

    res = optimize.minimize(f, 17, bounds=[(-100, 100)], method=method)
    assert res.success
    assert_allclose(res.x, [0.0], atol=1e-5)
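
# Added note (illustrative): a scalar x0 such as ``17`` above is promoted to a
# one-element 1-D array internally, which is why the result is compared
# against ``[0.0]`` rather than a scalar. A rough equivalent of the promotion:
#     x0 = np.atleast_1d(np.asarray(17, dtype=float))   # -> array([17.])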
class TestLBFGSBBounds:
    def setup_method(self):
        self.bounds = ((1, None), (None, None))
        self.solution = (1, 0)

    def fun(self, x, p=2.0):
        return 1.0 / p * (x[0]**p + x[1]**p)

    def jac(self, x, p=2.0):
        return x**(p - 1)

    def fj(self, x, p=2.0):
        return self.fun(x, p), self.jac(x, p)

    def test_l_bfgs_b_bounds(self):
        x, f, d = optimize.fmin_l_bfgs_b(self.fun, [0, -1],
                                         fprime=self.jac,
                                         bounds=self.bounds)
        assert d['warnflag'] == 0, d['task']
        assert_allclose(x, self.solution, atol=1e-6)

    def test_l_bfgs_b_funjac(self):
        # L-BFGS-B with fun and jac combined and extra arguments
        x, f, d = optimize.fmin_l_bfgs_b(self.fj, [0, -1], args=(2.0, ),
                                         bounds=self.bounds)
        assert d['warnflag'] == 0, d['task']
        assert_allclose(x, self.solution, atol=1e-6)

    def test_minimize_l_bfgs_b_bounds(self):
        # Minimize with method='L-BFGS-B' with bounds
        res = optimize.minimize(self.fun, [0, -1], method='L-BFGS-B',
                                jac=self.jac, bounds=self.bounds)
        assert res['success'], res['message']
        assert_allclose(res.x, self.solution, atol=1e-6)

    @pytest.mark.parametrize('bounds', [
        ([(10, 1), (1, 10)]),
        ([(1, 10), (10, 1)]),
        ([(10, 1), (10, 1)])
    ])
    def test_minimize_l_bfgs_b_incorrect_bounds(self, bounds):
        with pytest.raises(ValueError, match='.*bounds.*'):
            optimize.minimize(self.fun, [0, -1], method='L-BFGS-B',
                              jac=self.jac, bounds=bounds)

    def test_minimize_l_bfgs_b_bounds_FD(self):
        # test that initial starting value outside bounds doesn't raise
        # an error (done with clipping).
        # test all different finite differences combos, with and without args
        jacs = ['2-point', '3-point', None]
        argss = [(2.,), ()]
        for jac, args in itertools.product(jacs, argss):
            res = optimize.minimize(self.fun, [0, -1], args=args,
                                    method='L-BFGS-B',
                                    jac=jac, bounds=self.bounds,
                                    options={'finite_diff_rel_step': None})
            assert res['success'], res['message']
            assert_allclose(res.x, self.solution, atol=1e-6)
class TestOptimizeScalar:
    def setup_method(self):
        self.solution = 1.5

    def fun(self, x, a=1.5):
        """Objective function"""
        return (x - a)**2 - 0.8

    def test_brent(self):
        x = optimize.brent(self.fun)
        assert_allclose(x, self.solution, atol=1e-6)

        x = optimize.brent(self.fun, brack=(-3, -2))
        assert_allclose(x, self.solution, atol=1e-6)

        x = optimize.brent(self.fun, full_output=True)
        assert_allclose(x[0], self.solution, atol=1e-6)

        x = optimize.brent(self.fun, brack=(-15, -1, 15))
        assert_allclose(x, self.solution, atol=1e-6)

        message = r"\(f\(xb\) < f\(xa\)\) and \(f\(xb\) < f\(xc\)\)"
        with pytest.raises(ValueError, match=message):
            optimize.brent(self.fun, brack=(-1, 0, 1))

        message = r"\(xa < xb\) and \(xb < xc\)"
        with pytest.raises(ValueError, match=message):
            optimize.brent(self.fun, brack=(0, -1, 1))

    def test_golden(self):
        x = optimize.golden(self.fun)
        assert_allclose(x, self.solution, atol=1e-6)

        x = optimize.golden(self.fun, brack=(-3, -2))
        assert_allclose(x, self.solution, atol=1e-6)

        x = optimize.golden(self.fun, full_output=True)
        assert_allclose(x[0], self.solution, atol=1e-6)

        x = optimize.golden(self.fun, brack=(-15, -1, 15))
        assert_allclose(x, self.solution, atol=1e-6)

        x = optimize.golden(self.fun, tol=0)
        assert_allclose(x, self.solution)

        maxiter_test_cases = [0, 1, 5]
        for maxiter in maxiter_test_cases:
            x0 = optimize.golden(self.fun, maxiter=0, full_output=True)
            x = optimize.golden(self.fun, maxiter=maxiter, full_output=True)
            nfev0, nfev = x0[2], x[2]
            assert_equal(nfev - nfev0, maxiter)

        message = r"\(f\(xb\) < f\(xa\)\) and \(f\(xb\) < f\(xc\)\)"
        with pytest.raises(ValueError, match=message):
            optimize.golden(self.fun, brack=(-1, 0, 1))

        message = r"\(xa < xb\) and \(xb < xc\)"
        with pytest.raises(ValueError, match=message):
            optimize.golden(self.fun, brack=(0, -1, 1))
    def test_fminbound(self):
        x = optimize.fminbound(self.fun, 0, 1)
        assert_allclose(x, 1, atol=1e-4)

        x = optimize.fminbound(self.fun, 1, 5)
        assert_allclose(x, self.solution, atol=1e-6)

        x = optimize.fminbound(self.fun, np.array([1]), np.array([5]))
        assert_allclose(x, self.solution, atol=1e-6)

        assert_raises(ValueError, optimize.fminbound, self.fun, 5, 1)

    def test_fminbound_scalar(self):
        with pytest.raises(ValueError, match='.*must be finite scalars.*'):
            optimize.fminbound(self.fun, np.zeros((1, 2)), 1)

        x = optimize.fminbound(self.fun, 1, np.array(5))
        assert_allclose(x, self.solution, atol=1e-6)

    def test_gh11207(self):
        def fun(x):
            return x**2

        optimize.fminbound(fun, 0, 0)
  1351. def test_minimize_scalar(self):
  1352. # combine all tests above for the minimize_scalar wrapper
  1353. x = optimize.minimize_scalar(self.fun).x
  1354. assert_allclose(x, self.solution, atol=1e-6)
  1355. x = optimize.minimize_scalar(self.fun, method='Brent')
  1356. assert x.success
  1357. x = optimize.minimize_scalar(self.fun, method='Brent',
  1358. options=dict(maxiter=3))
  1359. assert not x.success
  1360. x = optimize.minimize_scalar(self.fun, bracket=(-3, -2),
  1361. args=(1.5, ), method='Brent').x
  1362. assert_allclose(x, self.solution, atol=1e-6)
  1363. x = optimize.minimize_scalar(self.fun, method='Brent',
  1364. args=(1.5,)).x
  1365. assert_allclose(x, self.solution, atol=1e-6)
  1366. x = optimize.minimize_scalar(self.fun, bracket=(-15, -1, 15),
  1367. args=(1.5, ), method='Brent').x
  1368. assert_allclose(x, self.solution, atol=1e-6)
  1369. x = optimize.minimize_scalar(self.fun, bracket=(-3, -2),
  1370. args=(1.5, ), method='golden').x
  1371. assert_allclose(x, self.solution, atol=1e-6)
  1372. x = optimize.minimize_scalar(self.fun, method='golden',
  1373. args=(1.5,)).x
  1374. assert_allclose(x, self.solution, atol=1e-6)
  1375. x = optimize.minimize_scalar(self.fun, bracket=(-15, -1, 15),
  1376. args=(1.5, ), method='golden').x
  1377. assert_allclose(x, self.solution, atol=1e-6)
  1378. x = optimize.minimize_scalar(self.fun, bounds=(0, 1), args=(1.5,),
  1379. method='Bounded').x
  1380. assert_allclose(x, 1, atol=1e-4)
  1381. x = optimize.minimize_scalar(self.fun, bounds=(1, 5), args=(1.5, ),
  1382. method='bounded').x
  1383. assert_allclose(x, self.solution, atol=1e-6)
  1384. x = optimize.minimize_scalar(self.fun, bounds=(np.array([1]),
  1385. np.array([5])),
  1386. args=(np.array([1.5]), ),
  1387. method='bounded').x
  1388. assert_allclose(x, self.solution, atol=1e-6)
  1389. assert_raises(ValueError, optimize.minimize_scalar, self.fun,
  1390. bounds=(5, 1), method='bounded', args=(1.5, ))
  1391. assert_raises(ValueError, optimize.minimize_scalar, self.fun,
  1392. bounds=(np.zeros(2), 1), method='bounded', args=(1.5, ))
  1393. x = optimize.minimize_scalar(self.fun, bounds=(1, np.array(5)),
  1394. method='bounded').x
  1395. assert_allclose(x, self.solution, atol=1e-6)
    def test_minimize_scalar_custom(self):
        # This function comes from the documentation example.
        def custmin(fun, bracket, args=(), maxfev=None, stepsize=0.1,
                    maxiter=100, callback=None, **options):
            bestx = (bracket[1] + bracket[0]) / 2.0
            besty = fun(bestx)
            funcalls = 1
            niter = 0
            improved = True
            stop = False

            while improved and not stop and niter < maxiter:
                improved = False
                niter += 1
                for testx in [bestx - stepsize, bestx + stepsize]:
                    testy = fun(testx, *args)
                    funcalls += 1
                    if testy < besty:
                        besty = testy
                        bestx = testx
                        improved = True
                if callback is not None:
                    callback(bestx)
                if maxfev is not None and funcalls >= maxfev:
                    stop = True
                    break

            return optimize.OptimizeResult(fun=besty, x=bestx, nit=niter,
                                           nfev=funcalls,
                                           success=(niter > 1))

        res = optimize.minimize_scalar(self.fun, bracket=(0, 4),
                                       method=custmin,
                                       options=dict(stepsize=0.05))
        assert_allclose(res.x, self.solution, atol=1e-6)
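
    # Added note: when `method` is a callable, as above, `minimize_scalar`
    # forwards the entries of `options` to it as keyword arguments (here
    # `stepsize=0.05`), and the callable is expected to return an
    # `OptimizeResult` with at least `x`, `fun`, `nit`, `nfev` and `success`.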
    def test_minimize_scalar_coerce_args_param(self):
        # Regression test for gh-3503
        optimize.minimize_scalar(self.fun, args=1.5)

    @pytest.mark.parametrize('method', ['brent', 'bounded', 'golden'])
    def test_disp(self, method):
        # test that all minimize_scalar methods accept a disp option.
        for disp in [0, 1, 2, 3]:
            optimize.minimize_scalar(self.fun, options={"disp": disp})
  1435. @pytest.mark.parametrize('method', ['brent', 'bounded', 'golden'])
  1436. def test_result_attributes(self, method):
  1437. kwargs = {"bounds": [-10, 10]} if method == 'bounded' else {}
  1438. result = optimize.minimize_scalar(self.fun, method=method, **kwargs)
  1439. assert hasattr(result, "x")
  1440. assert hasattr(result, "success")
  1441. assert hasattr(result, "message")
  1442. assert hasattr(result, "fun")
  1443. assert hasattr(result, "nfev")
  1444. assert hasattr(result, "nit")
  1445. @pytest.mark.parametrize('method', ['brent', 'bounded', 'golden'])
  1446. def test_nan_values(self, method):
  1447. # Check nan values result to failed exit status
  1448. np.random.seed(1234)
  1449. count = [0]
  1450. def func(x):
  1451. count[0] += 1
  1452. if count[0] > 4:
  1453. return np.nan
  1454. else:
  1455. return x**2 + 0.1 * np.sin(x)
  1456. bracket = (-1, 0, 1)
  1457. bounds = (-1, 1)
  1458. with np.errstate(invalid='ignore'), suppress_warnings() as sup:
  1459. sup.filter(UserWarning, "delta_grad == 0.*")
  1460. sup.filter(RuntimeWarning, ".*does not use Hessian.*")
  1461. sup.filter(RuntimeWarning, ".*does not use gradient.*")
  1462. count = [0]
  1463. kwargs = {"bounds": bounds} if method == 'bounded' else {}
  1464. sol = optimize.minimize_scalar(func, bracket=bracket,
  1465. **kwargs, method=method,
  1466. options=dict(maxiter=20))
  1467. assert_equal(sol.success, False)
    def test_minimize_scalar_defaults_gh10911(self):
        # Previously, `bounds` were silently ignored unless `method='bounded'`
        # was chosen. See gh-10911. Check that this is no longer the case.
        def f(x):
            return x**2

        res = optimize.minimize_scalar(f)
        assert_allclose(res.x, 0, atol=1e-8)

        res = optimize.minimize_scalar(f, bounds=(1, 100),
                                       options={'xatol': 1e-10})
        assert_allclose(res.x, 1)

    def test_minimize_non_finite_bounds_gh10911(self):
        # Previously, minimize_scalar misbehaved with infinite bounds.
        # See gh-10911. Check that it now raises an error, instead.
        msg = "Optimization bounds must be finite scalars."
        with pytest.raises(ValueError, match=msg):
            optimize.minimize_scalar(np.sin, bounds=(1, np.inf))
        with pytest.raises(ValueError, match=msg):
            optimize.minimize_scalar(np.sin, bounds=(np.nan, 1))

    @pytest.mark.parametrize("method", ['brent', 'golden'])
    def test_minimize_unbounded_method_with_bounds_gh10911(self, method):
        # Previously, `bounds` were silently ignored when `method='brent'` or
        # `method='golden'`. See gh-10911. Check that an error is now raised.
        msg = "Use of `bounds` is incompatible with..."
        with pytest.raises(ValueError, match=msg):
            optimize.minimize_scalar(np.sin, method=method, bounds=(1, 2))


def test_brent_negative_tolerance():
    assert_raises(ValueError, optimize.brent, np.cos, tol=-.01)
  1495. class TestNewtonCg:
  1496. def test_rosenbrock(self):
  1497. x0 = np.array([-1.2, 1.0])
  1498. sol = optimize.minimize(optimize.rosen, x0,
  1499. jac=optimize.rosen_der,
  1500. hess=optimize.rosen_hess,
  1501. tol=1e-5,
  1502. method='Newton-CG')
  1503. assert sol.success, sol.message
  1504. assert_allclose(sol.x, np.array([1, 1]), rtol=1e-4)
  1505. def test_himmelblau(self):
  1506. x0 = np.array(himmelblau_x0)
  1507. sol = optimize.minimize(himmelblau,
  1508. x0,
  1509. jac=himmelblau_grad,
  1510. hess=himmelblau_hess,
  1511. method='Newton-CG',
  1512. tol=1e-6)
  1513. assert sol.success, sol.message
  1514. assert_allclose(sol.x, himmelblau_xopt, rtol=1e-4)
  1515. assert_allclose(sol.fun, himmelblau_min, atol=1e-4)
  1516. def test_finite_difference(self):
  1517. x0 = np.array([-1.2, 1.0])
  1518. sol = optimize.minimize(optimize.rosen, x0,
  1519. jac=optimize.rosen_der,
  1520. hess='2-point',
  1521. tol=1e-5,
  1522. method='Newton-CG')
  1523. assert sol.success, sol.message
  1524. assert_allclose(sol.x, np.array([1, 1]), rtol=1e-4)
  1525. def test_hessian_update_strategy(self):
  1526. x0 = np.array([-1.2, 1.0])
  1527. sol = optimize.minimize(optimize.rosen, x0,
  1528. jac=optimize.rosen_der,
  1529. hess=optimize.BFGS(),
  1530. tol=1e-5,
  1531. method='Newton-CG')
  1532. assert sol.success, sol.message
  1533. assert_allclose(sol.x, np.array([1, 1]), rtol=1e-4)
  1534. def test_line_for_search():
  1535. # _line_for_search is only used in _linesearch_powell, which is also
  1536. # tested below. Thus there are more tests of _line_for_search in the
  1537. # test_linesearch_powell_bounded function.
  1538. line_for_search = optimize._optimize._line_for_search
  1539. # args are x0, alpha, lower_bound, upper_bound
  1540. # returns lmin, lmax
  1541. lower_bound = np.array([-5.3, -1, -1.5, -3])
  1542. upper_bound = np.array([1.9, 1, 2.8, 3])
  1543. # test when starting in the bounds
  1544. x0 = np.array([0., 0, 0, 0])
  1545. # and when starting outside of the bounds
  1546. x1 = np.array([0., 2, -3, 0])
  1547. all_tests = (
  1548. (x0, np.array([1., 0, 0, 0]), -5.3, 1.9),
  1549. (x0, np.array([0., 1, 0, 0]), -1, 1),
  1550. (x0, np.array([0., 0, 1, 0]), -1.5, 2.8),
  1551. (x0, np.array([0., 0, 0, 1]), -3, 3),
  1552. (x0, np.array([1., 1, 0, 0]), -1, 1),
  1553. (x0, np.array([1., 0, -1, 2]), -1.5, 1.5),
  1554. (x0, np.array([2., 0, -1, 2]), -1.5, 0.95),
  1555. (x1, np.array([1., 0, 0, 0]), -5.3, 1.9),
  1556. (x1, np.array([0., 1, 0, 0]), -3, -1),
  1557. (x1, np.array([0., 0, 1, 0]), 1.5, 5.8),
  1558. (x1, np.array([0., 0, 0, 1]), -3, 3),
  1559. (x1, np.array([1., 1, 0, 0]), -3, -1),
  1560. (x1, np.array([1., 0, -1, 0]), -5.3, -1.5),
  1561. )
  1562. for x, alpha, lmin, lmax in all_tests:
  1563. mi, ma = line_for_search(x, alpha, lower_bound, upper_bound)
  1564. assert_allclose(mi, lmin, atol=1e-6)
  1565. assert_allclose(ma, lmax, atol=1e-6)
  1566. # now with infinite bounds
  1567. lower_bound = np.array([-np.inf, -1, -np.inf, -3])
  1568. upper_bound = np.array([np.inf, 1, 2.8, np.inf])
  1569. all_tests = (
  1570. (x0, np.array([1., 0, 0, 0]), -np.inf, np.inf),
  1571. (x0, np.array([0., 1, 0, 0]), -1, 1),
  1572. (x0, np.array([0., 0, 1, 0]), -np.inf, 2.8),
  1573. (x0, np.array([0., 0, 0, 1]), -3, np.inf),
  1574. (x0, np.array([1., 1, 0, 0]), -1, 1),
  1575. (x0, np.array([1., 0, -1, 2]), -1.5, np.inf),
  1576. (x1, np.array([1., 0, 0, 0]), -np.inf, np.inf),
  1577. (x1, np.array([0., 1, 0, 0]), -3, -1),
  1578. (x1, np.array([0., 0, 1, 0]), -np.inf, 5.8),
  1579. (x1, np.array([0., 0, 0, 1]), -3, np.inf),
  1580. (x1, np.array([1., 1, 0, 0]), -3, -1),
  1581. (x1, np.array([1., 0, -1, 0]), -5.8, np.inf),
  1582. )
  1583. for x, alpha, lmin, lmax in all_tests:
  1584. mi, ma = line_for_search(x, alpha, lower_bound, upper_bound)
  1585. assert_allclose(mi, lmin, atol=1e-6)
  1586. assert_allclose(ma, lmax, atol=1e-6)
  1587. def test_linesearch_powell():
  1588. # helper function in optimize.py, not a public function.
  1589. linesearch_powell = optimize._optimize._linesearch_powell
  1590. # args are func, p, xi, fval, lower_bound=None, upper_bound=None, tol=1e-3
  1591. # returns new_fval, p + direction, direction
  1592. func = lambda x: np.sum((x - np.array([-1., 2., 1.5, -.4]))**2)
  1593. p0 = np.array([0., 0, 0, 0])
  1594. fval = func(p0)
  1595. lower_bound = np.array([-np.inf] * 4)
  1596. upper_bound = np.array([np.inf] * 4)
  1597. all_tests = (
  1598. (np.array([1., 0, 0, 0]), -1),
  1599. (np.array([0., 1, 0, 0]), 2),
  1600. (np.array([0., 0, 1, 0]), 1.5),
  1601. (np.array([0., 0, 0, 1]), -.4),
  1602. (np.array([-1., 0, 1, 0]), 1.25),
  1603. (np.array([0., 0, 1, 1]), .55),
  1604. (np.array([2., 0, -1, 1]), -.65),
  1605. )
  1606. for xi, l in all_tests:
  1607. f, p, direction = linesearch_powell(func, p0, xi,
  1608. fval=fval, tol=1e-5)
  1609. assert_allclose(f, func(l * xi), atol=1e-6)
  1610. assert_allclose(p, l * xi, atol=1e-6)
  1611. assert_allclose(direction, l * xi, atol=1e-6)
  1612. f, p, direction = linesearch_powell(func, p0, xi, tol=1e-5,
  1613. lower_bound=lower_bound,
  1614. upper_bound=upper_bound,
  1615. fval=fval)
  1616. assert_allclose(f, func(l * xi), atol=1e-6)
  1617. assert_allclose(p, l * xi, atol=1e-6)
  1618. assert_allclose(direction, l * xi, atol=1e-6)
  1619. def test_linesearch_powell_bounded():
  1620. # helper function in optimize.py, not a public function.
  1621. linesearch_powell = optimize._optimize._linesearch_powell
  1622. # args are func, p, xi, fval, lower_bound=None, upper_bound=None, tol=1e-3
  1623. # returns new_fval, p+direction, direction
  1624. func = lambda x: np.sum((x-np.array([-1., 2., 1.5, -.4]))**2)
  1625. p0 = np.array([0., 0, 0, 0])
  1626. fval = func(p0)
  1627. # first choose bounds such that the same tests from
  1628. # test_linesearch_powell should pass.
  1629. lower_bound = np.array([-2.]*4)
  1630. upper_bound = np.array([2.]*4)
  1631. all_tests = (
  1632. (np.array([1., 0, 0, 0]), -1),
  1633. (np.array([0., 1, 0, 0]), 2),
  1634. (np.array([0., 0, 1, 0]), 1.5),
  1635. (np.array([0., 0, 0, 1]), -.4),
  1636. (np.array([-1., 0, 1, 0]), 1.25),
  1637. (np.array([0., 0, 1, 1]), .55),
  1638. (np.array([2., 0, -1, 1]), -.65),
  1639. )
  1640. for xi, l in all_tests:
  1641. f, p, direction = linesearch_powell(func, p0, xi, tol=1e-5,
  1642. lower_bound=lower_bound,
  1643. upper_bound=upper_bound,
  1644. fval=fval)
  1645. assert_allclose(f, func(l * xi), atol=1e-6)
  1646. assert_allclose(p, l * xi, atol=1e-6)
  1647. assert_allclose(direction, l * xi, atol=1e-6)
  1648. # now choose bounds such that unbounded vs bounded gives different results
  1649. lower_bound = np.array([-.3]*3 + [-1])
  1650. upper_bound = np.array([.45]*3 + [.9])
  1651. all_tests = (
  1652. (np.array([1., 0, 0, 0]), -.3),
  1653. (np.array([0., 1, 0, 0]), .45),
  1654. (np.array([0., 0, 1, 0]), .45),
  1655. (np.array([0., 0, 0, 1]), -.4),
  1656. (np.array([-1., 0, 1, 0]), .3),
  1657. (np.array([0., 0, 1, 1]), .45),
  1658. (np.array([2., 0, -1, 1]), -.15),
  1659. )
  1660. for xi, l in all_tests:
  1661. f, p, direction = linesearch_powell(func, p0, xi, tol=1e-5,
  1662. lower_bound=lower_bound,
  1663. upper_bound=upper_bound,
  1664. fval=fval)
  1665. assert_allclose(f, func(l * xi), atol=1e-6)
  1666. assert_allclose(p, l * xi, atol=1e-6)
  1667. assert_allclose(direction, l * xi, atol=1e-6)
  1668. # now choose as above but start outside the bounds
  1669. p0 = np.array([-1., 0, 0, 2])
  1670. fval = func(p0)
  1671. all_tests = (
  1672. (np.array([1., 0, 0, 0]), .7),
  1673. (np.array([0., 1, 0, 0]), .45),
  1674. (np.array([0., 0, 1, 0]), .45),
  1675. (np.array([0., 0, 0, 1]), -2.4),
  1676. )
  1677. for xi, l in all_tests:
  1678. f, p, direction = linesearch_powell(func, p0, xi, tol=1e-5,
  1679. lower_bound=lower_bound,
  1680. upper_bound=upper_bound,
  1681. fval=fval)
  1682. assert_allclose(f, func(p0 + l * xi), atol=1e-6)
  1683. assert_allclose(p, p0 + l * xi, atol=1e-6)
  1684. assert_allclose(direction, l * xi, atol=1e-6)
  1685. # now mix in inf
  1686. p0 = np.array([0., 0, 0, 0])
  1687. fval = func(p0)
  1688. # now choose bounds that mix inf
  1689. lower_bound = np.array([-.3, -np.inf, -np.inf, -1])
  1690. upper_bound = np.array([np.inf, .45, np.inf, .9])
  1691. all_tests = (
  1692. (np.array([1., 0, 0, 0]), -.3),
  1693. (np.array([0., 1, 0, 0]), .45),
  1694. (np.array([0., 0, 1, 0]), 1.5),
  1695. (np.array([0., 0, 0, 1]), -.4),
  1696. (np.array([-1., 0, 1, 0]), .3),
  1697. (np.array([0., 0, 1, 1]), .55),
  1698. (np.array([2., 0, -1, 1]), -.15),
  1699. )
  1700. for xi, l in all_tests:
  1701. f, p, direction = linesearch_powell(func, p0, xi, tol=1e-5,
  1702. lower_bound=lower_bound,
  1703. upper_bound=upper_bound,
  1704. fval=fval)
  1705. assert_allclose(f, func(l * xi), atol=1e-6)
  1706. assert_allclose(p, l * xi, atol=1e-6)
  1707. assert_allclose(direction, l * xi, atol=1e-6)
  1708. # now choose as above but start outside the bounds
  1709. p0 = np.array([-1., 0, 0, 2])
  1710. fval = func(p0)
  1711. all_tests = (
  1712. (np.array([1., 0, 0, 0]), .7),
  1713. (np.array([0., 1, 0, 0]), .45),
  1714. (np.array([0., 0, 1, 0]), 1.5),
  1715. (np.array([0., 0, 0, 1]), -2.4),
  1716. )
  1717. for xi, l in all_tests:
  1718. f, p, direction = linesearch_powell(func, p0, xi, tol=1e-5,
  1719. lower_bound=lower_bound,
  1720. upper_bound=upper_bound,
  1721. fval=fval)
  1722. assert_allclose(f, func(p0 + l * xi), atol=1e-6)
  1723. assert_allclose(p, p0 + l * xi, atol=1e-6)
  1724. assert_allclose(direction, l * xi, atol=1e-6)
  1725. def test_powell_limits():
  1726. # gh15342 - powell was going outside bounds for some function evaluations.
  1727. bounds = optimize.Bounds([0, 0], [0.6, 20])
  1728. def fun(x):
  1729. a, b = x
  1730. assert (x >= bounds.lb).all() and (x <= bounds.ub).all()
  1731. return a ** 2 + b ** 2
  1732. optimize.minimize(fun, x0=[0.6, 20], method='Powell', bounds=bounds)
  1733. # Another test from the original report - gh-13411
  1734. bounds = optimize.Bounds(lb=[0,], ub=[1,], keep_feasible=[True,])
  1735. def func(x):
  1736. assert x >= 0 and x <= 1
  1737. return np.exp(x)
  1738. optimize.minimize(fun=func, x0=[0.5], method='powell', bounds=bounds)
class TestRosen:

    def test_hess(self):
        # Compare rosen_hess(x) times p with rosen_hess_prod(x, p). See gh-1775.
        x = np.array([3, 4, 5])
        p = np.array([2, 2, 2])
        hp = optimize.rosen_hess_prod(x, p)
        dothp = np.dot(optimize.rosen_hess(x), p)
        assert_equal(hp, dothp)


def himmelblau(p):
    """
    R^2 -> R^1 test function for optimization. The function has four local
    minima where himmelblau(xopt) == 0.
    """
    x, y = p
    a = x*x + y - 11
    b = x + y*y - 7
    return a*a + b*b


def himmelblau_grad(p):
    x, y = p
    return np.array([4*x**3 + 4*x*y - 42*x + 2*y**2 - 14,
                     2*x**2 + 4*x*y + 4*y**3 - 26*y - 22])


def himmelblau_hess(p):
    x, y = p
    return np.array([[12*x**2 + 4*y - 42, 4*x + 4*y],
                     [4*x + 4*y, 4*x + 12*y**2 - 26]])


himmelblau_x0 = [-0.27, -0.9]
himmelblau_xopt = [3, 2]
himmelblau_min = 0.0
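
# Added note: himmelblau_xopt above is the minimum the tests converge to from
# himmelblau_x0; the other three local minima of Himmelblau's function lie
# approximately at (-2.805118, 3.131312), (-3.779310, -3.283186) and
# (3.584428, -1.848126), all with function value 0.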
  1767. def test_minimize_multiple_constraints():
  1768. # Regression test for gh-4240.
  1769. def func(x):
  1770. return np.array([25 - 0.2 * x[0] - 0.4 * x[1] - 0.33 * x[2]])
  1771. def func1(x):
  1772. return np.array([x[1]])
  1773. def func2(x):
  1774. return np.array([x[2]])
  1775. cons = ({'type': 'ineq', 'fun': func},
  1776. {'type': 'ineq', 'fun': func1},
  1777. {'type': 'ineq', 'fun': func2})
  1778. f = lambda x: -1 * (x[0] + x[1] + x[2])
  1779. res = optimize.minimize(f, [0, 0, 0], method='SLSQP', constraints=cons)
  1780. assert_allclose(res.x, [125, 0, 0], atol=1e-10)
  1781. class TestOptimizeResultAttributes:
  1782. # Test that all minimizers return an OptimizeResult containing
  1783. # all the OptimizeResult attributes
  1784. def setup_method(self):
  1785. self.x0 = [5, 5]
  1786. self.func = optimize.rosen
  1787. self.jac = optimize.rosen_der
  1788. self.hess = optimize.rosen_hess
  1789. self.hessp = optimize.rosen_hess_prod
  1790. self.bounds = [(0., 10.), (0., 10.)]
  1791. def test_attributes_present(self):
  1792. attributes = ['nit', 'nfev', 'x', 'success', 'status', 'fun',
  1793. 'message']
  1794. skip = {'cobyla': ['nit']}
  1795. for method in MINIMIZE_METHODS:
  1796. with suppress_warnings() as sup:
  1797. sup.filter(RuntimeWarning,
  1798. ("Method .+ does not use (gradient|Hessian.*)"
  1799. " information"))
  1800. res = optimize.minimize(self.func, self.x0, method=method,
  1801. jac=self.jac, hess=self.hess,
  1802. hessp=self.hessp)
  1803. for attribute in attributes:
  1804. if method in skip and attribute in skip[method]:
  1805. continue
  1806. assert hasattr(res, attribute)
  1807. assert attribute in dir(res)
  1808. # gh13001, OptimizeResult.message should be a str
  1809. assert isinstance(res.message, str)
  1810. def f1(z, *params):
  1811. x, y = z
  1812. a, b, c, d, e, f, g, h, i, j, k, l, scale = params
  1813. return (a * x**2 + b * x * y + c * y**2 + d*x + e*y + f)
  1814. def f2(z, *params):
  1815. x, y = z
  1816. a, b, c, d, e, f, g, h, i, j, k, l, scale = params
  1817. return (-g*np.exp(-((x-h)**2 + (y-i)**2) / scale))
  1818. def f3(z, *params):
  1819. x, y = z
  1820. a, b, c, d, e, f, g, h, i, j, k, l, scale = params
  1821. return (-j*np.exp(-((x-k)**2 + (y-l)**2) / scale))
  1822. def brute_func(z, *params):
  1823. return f1(z, *params) + f2(z, *params) + f3(z, *params)
  1824. class TestBrute:
  1825. # Test the "brute force" method
  1826. def setup_method(self):
  1827. self.params = (2, 3, 7, 8, 9, 10, 44, -1, 2, 26, 1, -2, 0.5)
  1828. self.rranges = (slice(-4, 4, 0.25), slice(-4, 4, 0.25))
  1829. self.solution = np.array([-1.05665192, 1.80834843])
  1830. def brute_func(self, z, *params):
  1831. # an instance method optimizing
  1832. return brute_func(z, *params)
  1833. def test_brute(self):
  1834. # test fmin
  1835. resbrute = optimize.brute(brute_func, self.rranges, args=self.params,
  1836. full_output=True, finish=optimize.fmin)
  1837. assert_allclose(resbrute[0], self.solution, atol=1e-3)
  1838. assert_allclose(resbrute[1], brute_func(self.solution, *self.params),
  1839. atol=1e-3)
  1840. # test minimize
  1841. resbrute = optimize.brute(brute_func, self.rranges, args=self.params,
  1842. full_output=True,
  1843. finish=optimize.minimize)
  1844. assert_allclose(resbrute[0], self.solution, atol=1e-3)
  1845. assert_allclose(resbrute[1], brute_func(self.solution, *self.params),
  1846. atol=1e-3)
  1847. # test that brute can optimize an instance method (the other tests use
  1848. # a non-class based function
  1849. resbrute = optimize.brute(self.brute_func, self.rranges,
  1850. args=self.params, full_output=True,
  1851. finish=optimize.minimize)
  1852. assert_allclose(resbrute[0], self.solution, atol=1e-3)
  1853. def test_1D(self):
  1854. # test that for a 1-D problem the test function is passed an array,
  1855. # not a scalar.
  1856. def f(x):
  1857. assert len(x.shape) == 1
  1858. assert x.shape[0] == 1
  1859. return x ** 2
  1860. optimize.brute(f, [(-1, 1)], Ns=3, finish=None)
  1861. def test_workers(self):
  1862. # check that parallel evaluation works
  1863. resbrute = optimize.brute(brute_func, self.rranges, args=self.params,
  1864. full_output=True, finish=None)
  1865. resbrute1 = optimize.brute(brute_func, self.rranges, args=self.params,
  1866. full_output=True, finish=None, workers=2)
  1867. assert_allclose(resbrute1[-1], resbrute[-1])
  1868. assert_allclose(resbrute1[0], resbrute[0])
  1869. def test_runtime_warning(self):
  1870. rng = np.random.default_rng(1234)
  1871. def func(z, *params):
  1872. return rng.random(1) * 1000 # never converged problem
  1873. with pytest.warns(RuntimeWarning,
  1874. match=r'Either final optimization did not succeed'):
  1875. optimize.brute(func, self.rranges, args=self.params, disp=True)
  1876. def test_coerce_args_param(self):
  1877. # optimize.brute should coerce non-iterable args to a tuple.
  1878. def f(x, *args):
  1879. return x ** args[0]
  1880. resbrute = optimize.brute(f, (slice(-4, 4, .25),), args=2)
  1881. assert_allclose(resbrute, 0)
  1882. def test_cobyla_threadsafe():
  1883. # Verify that cobyla is threadsafe. Will segfault if it is not.
  1884. import concurrent.futures
  1885. import time
  1886. def objective1(x):
  1887. time.sleep(0.1)
  1888. return x[0]**2
  1889. def objective2(x):
  1890. time.sleep(0.1)
  1891. return (x[0]-1)**2
  1892. min_method = "COBYLA"
  1893. def minimizer1():
  1894. return optimize.minimize(objective1,
  1895. [0.0],
  1896. method=min_method)
  1897. def minimizer2():
  1898. return optimize.minimize(objective2,
  1899. [0.0],
  1900. method=min_method)
  1901. with concurrent.futures.ThreadPoolExecutor() as pool:
  1902. tasks = []
  1903. tasks.append(pool.submit(minimizer1))
  1904. tasks.append(pool.submit(minimizer2))
  1905. for t in tasks:
  1906. res = t.result()
  1907. class TestIterationLimits:
  1908. # Tests that optimisation does not give up before trying requested
  1909. # number of iterations or evaluations. And that it does not succeed
  1910. # by exceeding the limits.
  1911. def setup_method(self):
  1912. self.funcalls = 0
  1913. def slow_func(self, v):
  1914. self.funcalls += 1
  1915. r, t = np.sqrt(v[0]**2+v[1]**2), np.arctan2(v[0], v[1])
  1916. return np.sin(r*20 + t)+r*0.5
  1917. def test_neldermead_limit(self):
  1918. self.check_limits("Nelder-Mead", 200)
  1919. def test_powell_limit(self):
  1920. self.check_limits("powell", 1000)
  1921. def check_limits(self, method, default_iters):
  1922. for start_v in [[0.1, 0.1], [1, 1], [2, 2]]:
  1923. for mfev in [50, 500, 5000]:
  1924. self.funcalls = 0
  1925. res = optimize.minimize(self.slow_func, start_v,
  1926. method=method,
  1927. options={"maxfev": mfev})
  1928. assert self.funcalls == res["nfev"]
  1929. if res["success"]:
  1930. assert res["nfev"] < mfev
  1931. else:
  1932. assert res["nfev"] >= mfev
  1933. for mit in [50, 500, 5000]:
  1934. res = optimize.minimize(self.slow_func, start_v,
  1935. method=method,
  1936. options={"maxiter": mit})
  1937. if res["success"]:
  1938. assert res["nit"] <= mit
  1939. else:
  1940. assert res["nit"] >= mit
  1941. for mfev, mit in [[50, 50], [5000, 5000], [5000, np.inf]]:
  1942. self.funcalls = 0
  1943. res = optimize.minimize(self.slow_func, start_v,
  1944. method=method,
  1945. options={"maxiter": mit,
  1946. "maxfev": mfev})
  1947. assert self.funcalls == res["nfev"]
  1948. if res["success"]:
  1949. assert res["nfev"] < mfev and res["nit"] <= mit
  1950. else:
  1951. assert res["nfev"] >= mfev or res["nit"] >= mit
  1952. for mfev, mit in [[np.inf, None], [None, np.inf]]:
  1953. self.funcalls = 0
  1954. res = optimize.minimize(self.slow_func, start_v,
  1955. method=method,
  1956. options={"maxiter": mit,
  1957. "maxfev": mfev})
  1958. assert self.funcalls == res["nfev"]
  1959. if res["success"]:
  1960. if mfev is None:
  1961. assert res["nfev"] < default_iters*2
  1962. else:
  1963. assert res["nit"] <= default_iters*2
  1964. else:
  1965. assert res["nfev"] >= default_iters*2 or res["nit"] >= default_iters*2
  1966. def test_result_x_shape_when_len_x_is_one():
  1967. def fun(x):
  1968. return x * x
  1969. def jac(x):
  1970. return 2. * x
  1971. def hess(x):
  1972. return np.array([[2.]])
  1973. methods = ['Nelder-Mead', 'Powell', 'CG', 'BFGS', 'L-BFGS-B', 'TNC',
  1974. 'COBYLA', 'SLSQP']
  1975. for method in methods:
  1976. res = optimize.minimize(fun, np.array([0.1]), method=method)
  1977. assert res.x.shape == (1,)
  1978. # use jac + hess
  1979. methods = ['trust-constr', 'dogleg', 'trust-ncg', 'trust-exact',
  1980. 'trust-krylov', 'Newton-CG']
  1981. for method in methods:
  1982. res = optimize.minimize(fun, np.array([0.1]), method=method, jac=jac,
  1983. hess=hess)
  1984. assert res.x.shape == (1,)
class FunctionWithGradient:
    def __init__(self):
        self.number_of_calls = 0

    def __call__(self, x):
        self.number_of_calls += 1
        return np.sum(x**2), 2 * x


@pytest.fixture
def function_with_gradient():
    return FunctionWithGradient()


def test_memoize_jac_function_before_gradient(function_with_gradient):
    memoized_function = MemoizeJac(function_with_gradient)

    x0 = np.array([1.0, 2.0])
    assert_allclose(memoized_function(x0), 5.0)
    assert function_with_gradient.number_of_calls == 1

    assert_allclose(memoized_function.derivative(x0), 2 * x0)
    assert function_with_gradient.number_of_calls == 1, \
        "function is not recomputed " \
        "if gradient is requested after function value"

    assert_allclose(
        memoized_function(2 * x0), 20.0,
        err_msg="different input triggers new computation")
    assert function_with_gradient.number_of_calls == 2, \
        "different input triggers new computation"


def test_memoize_jac_gradient_before_function(function_with_gradient):
    memoized_function = MemoizeJac(function_with_gradient)

    x0 = np.array([1.0, 2.0])
    assert_allclose(memoized_function.derivative(x0), 2 * x0)
    assert function_with_gradient.number_of_calls == 1

    assert_allclose(memoized_function(x0), 5.0)
    assert function_with_gradient.number_of_calls == 1, \
        "function is not recomputed " \
        "if function value is requested after gradient"

    assert_allclose(
        memoized_function.derivative(2 * x0), 4 * x0,
        err_msg="different input triggers new computation")
    assert function_with_gradient.number_of_calls == 2, \
        "different input triggers new computation"
  2022. def test_memoize_jac_with_bfgs(function_with_gradient):
  2023. """ Tests that using MemoizedJac in combination with ScalarFunction
  2024. and BFGS does not lead to repeated function evaluations.
  2025. Tests changes made in response to GH11868.
  2026. """
  2027. memoized_function = MemoizeJac(function_with_gradient)
  2028. jac = memoized_function.derivative
  2029. hess = optimize.BFGS()
  2030. x0 = np.array([1.0, 0.5])
  2031. scalar_function = ScalarFunction(
  2032. memoized_function, x0, (), jac, hess, None, None)
  2033. assert function_with_gradient.number_of_calls == 1
  2034. scalar_function.fun(x0 + 0.1)
  2035. assert function_with_gradient.number_of_calls == 2
  2036. scalar_function.fun(x0 + 0.2)
  2037. assert function_with_gradient.number_of_calls == 3
def test_gh12696():
    # Test that optimize doesn't throw warning gh-12696
    with assert_no_warnings():
        optimize.fminbound(
            lambda x: np.array([x**2]), -np.pi, np.pi, disp=False)
  2043. # --- Test minimize with equal upper and lower bounds --- #
  2044. def setup_test_equal_bounds():
  2045. np.random.seed(0)
  2046. x0 = np.random.rand(4)
  2047. lb = np.array([0, 2, -1, -1.0])
  2048. ub = np.array([3, 2, 2, -1.0])
  2049. i_eb = (lb == ub)
  2050. def check_x(x, check_size=True, check_values=True):
  2051. if check_size:
  2052. assert x.size == 4
  2053. if check_values:
  2054. assert_allclose(x[i_eb], lb[i_eb])
  2055. def func(x):
  2056. check_x(x)
  2057. return optimize.rosen(x)
  2058. def grad(x):
  2059. check_x(x)
  2060. return optimize.rosen_der(x)
  2061. def callback(x, *args):
  2062. check_x(x)
  2063. def constraint1(x):
  2064. check_x(x, check_values=False)
  2065. return x[0:1] - 1
  2066. def jacobian1(x):
  2067. check_x(x, check_values=False)
  2068. dc = np.zeros_like(x)
  2069. dc[0] = 1
  2070. return dc
  2071. def constraint2(x):
  2072. check_x(x, check_values=False)
  2073. return x[2:3] - 0.5
  2074. def jacobian2(x):
  2075. check_x(x, check_values=False)
  2076. dc = np.zeros_like(x)
  2077. dc[2] = 1
  2078. return dc
  2079. c1a = NonlinearConstraint(constraint1, -np.inf, 0)
  2080. c1b = NonlinearConstraint(constraint1, -np.inf, 0, jacobian1)
  2081. c2a = NonlinearConstraint(constraint2, -np.inf, 0)
  2082. c2b = NonlinearConstraint(constraint2, -np.inf, 0, jacobian2)
  2083. # test using the three methods that accept bounds, use derivatives, and
  2084. # have some trouble when bounds fix variables
  2085. methods = ('L-BFGS-B', 'SLSQP', 'TNC')
  2086. # test w/out gradient, w/ gradient, and w/ combined objective/gradient
  2087. kwds = ({"fun": func, "jac": False},
  2088. {"fun": func, "jac": grad},
  2089. {"fun": (lambda x: (func(x), grad(x))),
  2090. "jac": True})
  2091. # test with both old- and new-style bounds
  2092. bound_types = (lambda lb, ub: list(zip(lb, ub)),
  2093. Bounds)
  2094. # Test for many combinations of constraints w/ and w/out jacobian
  2095. # Pairs in format: (test constraints, reference constraints)
  2096. # (always use analytical jacobian in reference)
  2097. constraints = ((None, None), ([], []),
  2098. (c1a, c1b), (c2b, c2b),
  2099. ([c1b], [c1b]), ([c2a], [c2b]),
  2100. ([c1a, c2a], [c1b, c2b]),
  2101. ([c1a, c2b], [c1b, c2b]),
  2102. ([c1b, c2b], [c1b, c2b]))
  2103. # test with and without callback function
  2104. callbacks = (None, callback)
  2105. data = {"methods": methods, "kwds": kwds, "bound_types": bound_types,
  2106. "constraints": constraints, "callbacks": callbacks,
  2107. "lb": lb, "ub": ub, "x0": x0, "i_eb": i_eb}
  2108. return data
  2109. eb_data = setup_test_equal_bounds()
  2110. # This test is about handling fixed variables, not the accuracy of the solvers
  2111. @pytest.mark.xfail_on_32bit("Failures due to floating point issues, not logic")
  2112. @pytest.mark.parametrize('method', eb_data["methods"])
  2113. @pytest.mark.parametrize('kwds', eb_data["kwds"])
  2114. @pytest.mark.parametrize('bound_type', eb_data["bound_types"])
  2115. @pytest.mark.parametrize('constraints', eb_data["constraints"])
  2116. @pytest.mark.parametrize('callback', eb_data["callbacks"])
  2117. def test_equal_bounds(method, kwds, bound_type, constraints, callback):
  2118. """
  2119. Tests that minimizers still work if (bounds.lb == bounds.ub).any()
  2120. gh12502 - Divide by zero in Jacobian numerical differentiation when
  2121. equality bounds constraints are used
  2122. """
  2123. # GH-15051; slightly more skips than necessary; hopefully fixed by GH-14882
  2124. if (platform.machine() == 'aarch64' and method == "TNC"
  2125. and kwds["jac"] is False and callback is not None):
  2126. pytest.skip('Tolerance violation on aarch')
  2127. lb, ub = eb_data["lb"], eb_data["ub"]
  2128. x0, i_eb = eb_data["x0"], eb_data["i_eb"]
  2129. test_constraints, reference_constraints = constraints
  2130. if test_constraints and not method == 'SLSQP':
  2131. pytest.skip('Only SLSQP supports nonlinear constraints')
  2132. # reference constraints always have analytical jacobian
  2133. # if test constraints are not the same, we'll need finite differences
  2134. fd_needed = (test_constraints != reference_constraints)
  2135. bounds = bound_type(lb, ub) # old- or new-style
  2136. kwds.update({"x0": x0, "method": method, "bounds": bounds,
  2137. "constraints": test_constraints, "callback": callback})
  2138. res = optimize.minimize(**kwds)
  2139. expected = optimize.minimize(optimize.rosen, x0, method=method,
  2140. jac=optimize.rosen_der, bounds=bounds,
  2141. constraints=reference_constraints)
  2142. # compare the output of a solution with FD vs that of an analytic grad
  2143. assert res.success
  2144. assert_allclose(res.fun, expected.fun, rtol=1e-6)
  2145. assert_allclose(res.x, expected.x, rtol=5e-4)
  2146. if fd_needed or kwds['jac'] is False:
  2147. expected.jac[i_eb] = np.nan
  2148. assert res.jac.shape[0] == 4
  2149. assert_allclose(res.jac[i_eb], expected.jac[i_eb], rtol=1e-6)
  2150. if not (kwds['jac'] or test_constraints or isinstance(bounds, Bounds)):
  2151. # compare the output to an equivalent FD minimization that doesn't
  2152. # need factorization
  2153. def fun(x):
  2154. new_x = np.array([np.nan, 2, np.nan, -1])
  2155. new_x[[0, 2]] = x
  2156. return optimize.rosen(new_x)
  2157. fd_res = optimize.minimize(fun,
  2158. x0[[0, 2]],
  2159. method=method,
  2160. bounds=bounds[::2])
  2161. assert_allclose(res.fun, fd_res.fun)
        # TODO this test should really be equivalent to the factorized version
        # above, down to res.nfev. However, testing found that when TNC is
        # called with or without a callback the output is different. The two
        # should be the same! This indicates that the TNC callback may be
        # mutating something when it shouldn't.
        assert_allclose(res.x[[0, 2]], fd_res.x, rtol=2e-6)
  2168. @pytest.mark.parametrize('method', eb_data["methods"])
  2169. def test_all_bounds_equal(method):
  2170. # this only tests methods that have parameters factored out when lb==ub
  2171. # it does not test other methods that work with bounds
  2172. def f(x, p1=1):
  2173. return np.linalg.norm(x) + p1
  2174. bounds = [(1, 1), (2, 2)]
  2175. x0 = (1.0, 3.0)
  2176. res = optimize.minimize(f, x0, bounds=bounds, method=method)
  2177. assert res.success
  2178. assert_allclose(res.fun, f([1.0, 2.0]))
  2179. assert res.nfev == 1
  2180. assert res.message == 'All independent variables were fixed by bounds.'
  2181. args = (2,)
  2182. res = optimize.minimize(f, x0, bounds=bounds, method=method, args=args)
  2183. assert res.success
  2184. assert_allclose(res.fun, f([1.0, 2.0], 2))
  2185. if method.upper() == 'SLSQP':
  2186. def con(x):
  2187. return np.sum(x)
  2188. nlc = NonlinearConstraint(con, -np.inf, 0.0)
  2189. res = optimize.minimize(
  2190. f, x0, bounds=bounds, method=method, constraints=[nlc]
  2191. )
  2192. assert res.success is False
  2193. assert_allclose(res.fun, f([1.0, 2.0]))
  2194. assert res.nfev == 1
  2195. message = "All independent variables were fixed by bounds, but"
  2196. assert res.message.startswith(message)
  2197. nlc = NonlinearConstraint(con, -np.inf, 4)
  2198. res = optimize.minimize(
  2199. f, x0, bounds=bounds, method=method, constraints=[nlc]
  2200. )
  2201. assert res.success is True
  2202. assert_allclose(res.fun, f([1.0, 2.0]))
  2203. assert res.nfev == 1
  2204. message = "All independent variables were fixed by bounds at values"
  2205. assert res.message.startswith(message)
def test_eb_constraints():
    # make sure constraint functions aren't overwritten when equal bounds
    # are employed, and a parameter is factored out. GH14859
    def f(x):
        return x[0]**3 + x[1]**2 + x[2]*x[3]

    def cfun(x):
        return x[0] + x[1] + x[2] + x[3] - 40

    constraints = [{'type': 'ineq', 'fun': cfun}]

    bounds = [(0, 20)] * 4
    bounds[1] = (5, 5)
    optimize.minimize(
        f,
        x0=[1, 2, 3, 4],
        method='SLSQP',
        bounds=bounds,
        constraints=constraints,
    )
    assert constraints[0]['fun'] == cfun
  2224. def test_show_options():
  2225. solver_methods = {
  2226. 'minimize': MINIMIZE_METHODS,
  2227. 'minimize_scalar': MINIMIZE_SCALAR_METHODS,
  2228. 'root': ROOT_METHODS,
  2229. 'root_scalar': ROOT_SCALAR_METHODS,
  2230. 'linprog': LINPROG_METHODS,
  2231. 'quadratic_assignment': QUADRATIC_ASSIGNMENT_METHODS,
  2232. }
  2233. for solver, methods in solver_methods.items():
  2234. for method in methods:
  2235. # testing that `show_options` works without error
  2236. show_options(solver, method)
  2237. unknown_solver_method = {
  2238. 'minimize': "ekki", # unknown method
  2239. 'maximize': "cg", # unknown solver
  2240. 'maximize_scalar': "ekki", # unknown solver and method
  2241. }
  2242. for solver, method in unknown_solver_method.items():
  2243. # testing that `show_options` raises ValueError
  2244. assert_raises(ValueError, show_options, solver, method)
  2245. def test_bounds_with_list():
  2246. # gh13501. Bounds created with lists weren't working for Powell.
  2247. bounds = optimize.Bounds(lb=[5., 5.], ub=[10., 10.])
  2248. optimize.minimize(
  2249. optimize.rosen, x0=np.array([9, 9]), method='Powell', bounds=bounds
  2250. )
  2251. def test_x_overwritten_user_function():
  2252. # if the user overwrites the x-array in the user function it's likely
  2253. # that the minimizer stops working properly.
  2254. # gh13740
  2255. def fquad(x):
  2256. a = np.arange(np.size(x))
  2257. x -= a
  2258. x *= x
  2259. return np.sum(x)
  2260. def fquad_jac(x):
  2261. a = np.arange(np.size(x))
  2262. x *= 2
  2263. x -= 2 * a
  2264. return x
  2265. fquad_hess = lambda x: np.eye(np.size(x)) * 2.0
  2266. meth_jac = [
  2267. 'newton-cg', 'dogleg', 'trust-ncg', 'trust-exact',
  2268. 'trust-krylov', 'trust-constr'
  2269. ]
  2270. meth_hess = [
  2271. 'dogleg', 'trust-ncg', 'trust-exact', 'trust-krylov', 'trust-constr'
  2272. ]
  2273. x0 = np.ones(5) * 1.5
  2274. for meth in MINIMIZE_METHODS:
  2275. jac = None
  2276. hess = None
  2277. if meth in meth_jac:
  2278. jac = fquad_jac
  2279. if meth in meth_hess:
  2280. hess = fquad_hess
  2281. res = optimize.minimize(fquad, x0, method=meth, jac=jac, hess=hess)
  2282. assert_allclose(res.x, np.arange(np.size(x0)), atol=2e-4)
  2283. class TestGlobalOptimization:
  2284. def test_optimize_result_attributes(self):
  2285. def func(x):
  2286. return x ** 2
  2287. # Note that `brute` solver does not return `OptimizeResult`
  2288. results = [optimize.basinhopping(func, x0=1),
  2289. optimize.differential_evolution(func, [(-4, 4)]),
  2290. optimize.shgo(func, [(-4, 4)]),
  2291. optimize.dual_annealing(func, [(-4, 4)]),
  2292. optimize.direct(func, [(-4, 4)]),
  2293. ]
  2294. for result in results:
  2295. assert isinstance(result, optimize.OptimizeResult)
  2296. assert hasattr(result, "x")
  2297. assert hasattr(result, "success")
  2298. assert hasattr(result, "message")
  2299. assert hasattr(result, "fun")
  2300. assert hasattr(result, "nfev")
  2301. assert hasattr(result, "nit")
def test_approx_fprime():
    # check that approx_fprime (serviced by approx_derivative) works for
    # jac and hess
    g = optimize.approx_fprime(himmelblau_x0, himmelblau)
    assert_allclose(g, himmelblau_grad(himmelblau_x0), rtol=5e-6)

    h = optimize.approx_fprime(himmelblau_x0, himmelblau_grad)
    assert_allclose(h, himmelblau_hess(himmelblau_x0), rtol=5e-6)
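
# Added note: by default ``approx_fprime`` uses a two-point forward
# difference per component, roughly
#     df/dx_i ~ (f(x + h * e_i) - f(x)) / h,   with h of order sqrt(eps),
# hence the loose relative tolerance used above.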
def test_gh12594():
    # gh-12594 reported an error in `_linesearch_powell` and
    # `_line_for_search` when `Bounds` was passed lists instead of arrays.
    # Check that results are the same whether the inputs are lists or arrays.
    def f(x):
        return x[0]**2 + (x[1] - 1)**2

    bounds = Bounds(lb=[-10, -10], ub=[10, 10])
    res = optimize.minimize(f, x0=(0, 0), method='Powell', bounds=bounds)
    bounds = Bounds(lb=np.array([-10, -10]), ub=np.array([10, 10]))
    ref = optimize.minimize(f, x0=(0, 0), method='Powell', bounds=bounds)

    assert_allclose(res.fun, ref.fun)
    assert_allclose(res.x, ref.x)