2
2
from numba import jit , njit
3
3
from collections import namedtuple
4
4
5
- __all__ = ['newton' , 'newton_secant' ]
5
+ __all__ = ['newton' , 'newton_halley' , ' newton_secant' ]
6
6
7
7
_ECONVERGED = 0
8
8
_ECONVERR = - 1
def _results(r):
    """Wrap a raw solver tuple in the ``results`` namedtuple.

    ``r`` is ``(root, funcalls, iterations, flag)``; the integer status
    flag is translated into a boolean ``converged`` field, True exactly
    when the flag equals 0 (the converged status code).
    """
    root, n_calls, n_iter, flag = r
    converged = (flag == 0)
    return results(root, n_calls, n_iter, converged)
20
19
@njit
21
20
def newton (func , x0 , fprime , args = (), tol = 1.48e-8 , maxiter = 50 ,
22
21
disp = True ):
@@ -48,7 +47,6 @@ def newton(func, x0, fprime, args=(), tol=1.48e-8, maxiter=50,
48
47
disp : bool, optional
49
48
If True, raise a RuntimeError if the algorithm didn't converge
50
49
51
-
52
50
Returns
53
51
-------
54
52
results : namedtuple
@@ -66,33 +64,120 @@ def newton(func, x0, fprime, args=(), tol=1.48e-8, maxiter=50,
66
64
# Convert to float (don't use float(x0); this works also for complex x0)
67
65
p0 = 1.0 * x0
68
66
funcalls = 0
69
-
67
+ status = _ECONVERR
68
+
70
69
# Newton-Raphson method
71
70
for itr in range (maxiter ):
72
71
# first evaluate fval
73
72
fval = func (p0 , * args )
74
73
funcalls += 1
75
74
# If fval is 0, a root has been found, then terminate
76
75
if fval == 0 :
77
- return _results ((p0 , funcalls , itr , _ECONVERGED ))
76
+ status = _ECONVERGED
77
+ p = p0
78
+ itr -= 1
79
+ break
78
80
fder = fprime (p0 , * args )
79
81
funcalls += 1
82
+ # derivative is zero, not converged
80
83
if fder == 0 :
81
- # derivative is zero
82
- return _results (( p0 , funcalls , itr + 1 , _ECONVERR ))
84
+ p = p0
85
+ break
83
86
newton_step = fval / fder
84
87
# Newton step
85
88
p = p0 - newton_step
86
89
if abs (p - p0 ) < tol :
87
- return _results ((p , funcalls , itr + 1 , _ECONVERGED ))
90
+ status = _ECONVERGED
91
+ break
88
92
p0 = p
89
93
90
- if disp :
94
+ if disp and status == _ECONVERR :
91
95
msg = "Failed to converge"
92
96
raise RuntimeError (msg )
93
97
94
- return _results ((p , funcalls , itr + 1 , _ECONVERR ))
98
+ return _results ((p , funcalls , itr + 1 , status ))
95
99
@njit
def newton_halley(func, x0, fprime, fprime2, args=(), tol=1.48e-8,
                  maxiter=50, disp=True):
    """
    Find a zero from Halley's method using the jitted version of
    Scipy's.

    `func`, `fprime`, `fprime2` must be jitted via Numba.

    Parameters
    ----------
    func : callable and jitted
        The function whose zero is wanted. It must be a function of a
        single variable of the form f(x,a,b,c...), where a,b,c... are extra
        arguments that can be passed in the `args` parameter.
    x0 : float
        An initial estimate of the zero that should be somewhere near the
        actual zero.
    fprime : callable and jitted
        The derivative of the function (when available and convenient).
    fprime2 : callable and jitted
        The second order derivative of the function
    args : tuple, optional
        Extra arguments to be used in the function call.
    tol : float, optional
        The allowable error of the zero value.
    maxiter : int, optional
        Maximum number of iterations.
    disp : bool, optional
        If True, raise a RuntimeError if the algorithm didn't converge

    Returns
    -------
    results : namedtuple
        root - Estimated location where function is zero.
        function_calls - Number of times the function was called.
        iterations - Number of iterations needed to find the root.
        converged - True if the routine converged
    """

    if tol <= 0:
        raise ValueError("tol is too small <= 0")
    if maxiter < 1:
        raise ValueError("maxiter must be greater than 0")

    # Convert to float (don't use float(x0); this works also for complex x0)
    p0 = 1.0 * x0
    funcalls = 0
    status = _ECONVERR

    # Halley Method
    for itr in range(maxiter):
        # first evaluate fval
        fval = func(p0, *args)
        funcalls += 1
        # If fval is 0, a root has been found, then terminate
        if fval == 0:
            status = _ECONVERGED
            p = p0
            # root found before taking a step this iteration, so the
            # reported iteration count excludes it
            itr -= 1
            break
        fder = fprime(p0, *args)
        funcalls += 1
        # derivative is zero, not converged
        if fder == 0:
            p = p0
            break
        newton_step = fval / fder
        # Halley's variant: damp the Newton step by the curvature term
        fder2 = fprime2(p0, *args)
        # BUG FIX: the fprime2 evaluation must be counted like every other
        # function evaluation, otherwise function_calls undercounts by one
        # per iteration (matches how func/fprime are counted above).
        funcalls += 1
        p = p0 - newton_step / (1.0 - 0.5 * newton_step * fder2 / fder)
        if abs(p - p0) < tol:
            status = _ECONVERGED
            break
        p0 = p

    if disp and status == _ECONVERR:
        msg = "Failed to converge"
        raise RuntimeError(msg)

    return _results((p, funcalls, itr + 1, status))
96
181
97
182
@njit
98
183
def newton_secant (func , x0 , args = (), tol = 1.48e-8 , maxiter = 50 ,
@@ -121,7 +206,6 @@ def newton_secant(func, x0, args=(), tol=1.48e-8, maxiter=50,
121
206
disp : bool, optional
122
207
If True, raise a RuntimeError if the algorithm didn't converge.
123
208
124
-
125
209
Returns
126
210
-------
127
211
results : namedtuple
@@ -139,6 +223,7 @@ def newton_secant(func, x0, args=(), tol=1.48e-8, maxiter=50,
139
223
# Convert to float (don't use float(x0); this works also for complex x0)
140
224
p0 = 1.0 * x0
141
225
funcalls = 0
226
+ status = _ECONVERR
142
227
143
228
# Secant method
144
229
if x0 >= 0 :
@@ -152,17 +237,21 @@ def newton_secant(func, x0, args=(), tol=1.48e-8, maxiter=50,
152
237
for itr in range (maxiter ):
153
238
if q1 == q0 :
154
239
p = (p1 + p0 ) / 2.0
155
- return _results ((p , funcalls , itr + 1 , _ECONVERGED ))
240
+ status = _ECONVERGED
241
+ break
156
242
else :
157
243
p = p1 - q1 * (p1 - p0 ) / (q1 - q0 )
158
244
if np .abs (p - p1 ) < tol :
159
- return _results ((p , funcalls , itr + 1 , _ECONVERGED ))
245
+ status = _ECONVERGED
246
+ break
160
247
p0 = p1
161
248
q0 = q1
162
249
p1 = p
163
250
q1 = func (p1 , * args )
164
251
funcalls += 1
165
252
166
- if disp :
253
+ if disp and status == _ECONVERR :
167
254
msg = "Failed to converge"
168
- raise RuntimeError (msg )
255
+ raise RuntimeError (msg )
256
+
257
+ return _results ((p , funcalls , itr + 1 , status ))
0 commit comments