Skip to content

Commit

Permalink
update
Browse files Browse the repository at this point in the history
  • Loading branch information
yarkin06 committed Jun 23, 2022
1 parent 9c7fa50 commit 9864a2a
Show file tree
Hide file tree
Showing 10 changed files with 97 additions and 8 deletions.
5 changes: 5 additions & 0 deletions HW2/Untitled-1.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@

# Scratch script: build a 1x1 NumPy array and print it and its negation.
import numpy as np

a = np.array([[0.00324953]])
for value in (a, -a):
    print(value)
74 changes: 71 additions & 3 deletions HW2/WolfePowellSearch.py
Original file line number Diff line number Diff line change
Expand Up @@ -54,19 +54,21 @@
# should return t=0.0938


from re import T
import numpy as np


def matrnr():
    """Return the author's matriculation number.

    The original kept a dead placeholder assignment (``matrnr = 0``)
    that shadowed the function name before being overwritten; the
    constant is now returned directly.
    """
    return 23062789


def WolfePowellSearch(f, x: np.array, d: np.array, sigma=1.0e-3, rho=1.0e-2, verbose=0):
    """Wolfe-Powell line search along direction d starting at x.

    Finds a step size t > 0 satisfying both
      W1 (sufficient decrease): f(x + t*d) <= f(x) + t*sigma*grad(x)^T d
      W2 (curvature):           grad(x + t*d)^T d >= rho * grad(x)^T d

    Parameters:
        f: object providing objective(x) -> scalar and gradient(x) -> (n,1) array
        x: current point, column vector (n x 1) — assumed; confirm against callers
        d: search direction, column vector (n x 1); must be a descent direction
        sigma: sufficient-decrease parameter (0 < sigma < 1/2)
        rho: curvature parameter (sigma < rho < 1)
        verbose: verbosity flag, kept for interface compatibility

    Returns:
        t: step size satisfying W1 and W2, as a plain Python float
           (the original returned np.copy(t_star), a 0-d array)

    Raises:
        TypeError: if d is not a descent direction (grad(x)^T d >= 0)
    """
    fx = f.objective(x)
    gradx = f.gradient(x)
    # float() collapses the (1,1) result of the inner product to a scalar.
    descent = float(gradx.T @ d)

    if descent >= 0:
        raise TypeError('descent direction check failed!')

    def W1(t):
        # Sufficient-decrease (Armijo) condition at step t.
        return f.objective(x + t * d) <= fx + t * sigma * descent

    def W2(t):
        # Curvature condition at step t.
        return float(f.gradient(x + t * d).T @ d) >= rho * descent

    t = 1.0
    if not W1(t):
        # Backtrack: halve t until W1 holds. [t, 2t] then brackets a Wolfe point.
        while not W1(t):
            t = t / 2
        t_minus = t
        t_plus = 2 * t
    elif W2(t):
        # t = 1 already satisfies both conditions.
        return t
    else:
        # W1 holds but W2 fails: front-track (double t) until W1 breaks,
        # leaving the bracket [t/2, t].
        while W1(t):
            t = 2 * t
        t_minus = t / 2
        t_plus = t

    # Bisect the bracket until the curvature condition holds at t_minus.
    # Invariant: W1(t_minus) is always satisfied.
    while not W2(t_minus):
        t = (t_minus + t_plus) / 2
        if W1(t):
            t_minus = t
        else:
            t_plus = t

    return t_minus
Binary file added HW2/__pycache__/LLTSolver.cpython-39.pyc
Binary file not shown.
Binary file added HW2/__pycache__/PrecCGSolver.cpython-39.pyc
Binary file not shown.
Binary file added HW2/__pycache__/WolfePowellSearch.cpython-39.pyc
Binary file not shown.
Binary file not shown.
Binary file added HW2/__pycache__/incompleteCholesky.cpython-39.pyc
Binary file not shown.
Binary file added HW2/__pycache__/nonlinearObjective.cpython-39.pyc
Binary file not shown.
Binary file not shown.
26 changes: 21 additions & 5 deletions HW2/globalNewtonDescent.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@

def matrnr():
    """Return the author's matriculation number.

    The original kept a dead placeholder assignment (``matrnr = 0``)
    that shadowed the function name before being overwritten; the
    constant is now returned directly.
    """
    return 23062789


Expand All @@ -62,14 +62,30 @@ def globalNewtonDescent(f, x0: np.array, eps=1.0e-3, verbose=0):
print('Start globalNewtonDescent...')

countIter = 0
x = MISSING
xk = x0


while MISSING STATEMENT:
MISSING CODE
gradx = f.gradient(xk)
hessx = f.hessian(xk)

while np.linalg.norm(gradx) > eps:
print("tur")
# MISSING CODE
Bk = hessx
dk = PCG.PrecCGSolver(Bk,xk)
descent = gradx.T @ dk
print(dk)
if descent >= 0:
print("girdi")
dk = -gradx
print(descent)
tk = WP.WolfePowellSearch(f,xk,dk)
xk = xk + tk*dk
hessx = f.hessian(xk)
Bk = hessx

countIter = countIter + 1

x = np.copy(xk)

if verbose:
gradx = f.gradient(x)
Expand Down

0 comments on commit 9864a2a

Please sign in to comment.