Skip to content

Commit

Permalink
HW5 Done
Browse files Browse the repository at this point in the history
  • Loading branch information
yarkin06 committed Jul 21, 2022
1 parent 7755833 commit a7edfa9
Show file tree
Hide file tree
Showing 12 changed files with 39 additions and 14 deletions.
Binary file added HW5/__pycache__/LLTSolver.cpython-39.pyc
Binary file not shown.
Binary file added HW5/__pycache__/PrecCGSolver.cpython-39.pyc
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file added HW5/__pycache__/benchmarkObjective.cpython-39.pyc
Binary file not shown.
Binary file added HW5/__pycache__/incompleteCholesky.cpython-39.pyc
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file added HW5/__pycache__/projectionInBox.cpython-39.pyc
Binary file not shown.
Binary file added HW5/__pycache__/quadraticObjective.cpython-39.pyc
Binary file not shown.
45 changes: 35 additions & 10 deletions HW5/augmentedLagrangianDescent.py
Original file line number Diff line number Diff line change
Expand Up @@ -64,7 +64,7 @@

def matrnr():
    """Return the student's matriculation number.

    Diff residue left a dead `matrnr = 0` assignment (which also shadowed
    the function name) before the real value; return the constant directly.
    """
    return 23062789


Expand All @@ -79,17 +79,42 @@ def augmentedLagrangianDescent(f, P, h, x0: np.array, alpha0=0, eps=1.0e-3, delt
print('Start augmentedLagrangianDescent...')

countIter = 0
xp = MISSING
alphak = MISSING


while MISSING STATEMENT:
MISSING CODE

xk = P.project(x0)
hk = h.objective(xk)
alphak = alpha0
gammak = 10
epsk = 1 / gammak
deltak = epsk**0.1
Ak = AO.augmentedLagrangianObjective(f, h, alphak, gammak)
# Ak_obj = Ak.objective(xk)
Ak_grad = Ak.gradient(xk)


while np.linalg.norm(xk - P.project(xk-Ak_grad)) > eps or np.linalg.norm(hk) > delta:
xmin = PD.projectedNewtonDescent(Ak, P, xk, epsk)
xk = xmin
# Ak_obj = Ak.objective(xk)
Ak_grad = Ak.gradient(xk)
hk = h.objective(xk)
if np.linalg.norm(hk) <= deltak:
alphak = alphak + gammak*h.objective(xk)
epsk = max(epsk/gammak,eps)
deltak = max(deltak/gammak**0.9,delta)
else:
gammak = max(10,np.sqrt(gammak))*gammak
epsk = 1/gammak
deltak = epsk**0.1

Ak = AO.augmentedLagrangianObjective(f, h, alphak, gammak)
# Ak_obj = Ak.objective(xk)
Ak_grad = Ak.gradient(xk)

countIter = countIter + 1

xp = xk
alphap = alphak

if verbose:
print('augmentedLagrangianDescent terminated after ', countIter, ' steps)
print('augmentedLagrangianDescent terminated after ', countIter, ' steps')

return [xp, alphak]
return [xp, alphap]
8 changes: 4 additions & 4 deletions HW5/augmentedLagrangianObjective.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@

def matrnr():
    """Return the student's matriculation number.

    Diff residue left a dead `matrnr = 0` assignment (which also shadowed
    the function name) before the real value; return the constant directly.
    """
    return 23062789


Expand All @@ -58,18 +58,18 @@ def __init__(self, f, h, alpha, gamma):

def objective(self, x: np.array):
    """Evaluate the augmented Lagrangian at x.

    A(x) = f(x) + alpha * h(x) + (gamma / 2) * h(x)^2

    where f is the objective, h the equality constraint, alpha the
    multiplier estimate and gamma the penalty parameter stored on self.
    (Stale `myObjective = MISSING` diff-residue line removed; h is now
    evaluated once instead of twice.)
    """
    hx = self.h.objective(x)
    myObjective = self.f.objective(x) + self.alpha * hx + 0.5 * self.gamma * hx**2

    return myObjective

def gradient(self, x: np.array):
    """Gradient of the augmented Lagrangian at x.

    dA(x) = df(x) + alpha * dh(x) + gamma * h(x) * dh(x)

    (Stale `myGradient = MISSING` diff-residue line removed; the constant
    `0.5 * gamma * 2` is simplified to `gamma` and the repeated h
    evaluations are hoisted — algebraically identical to the original.)
    """
    hx = self.h.objective(x)
    h_grad = self.h.gradient(x)
    myGradient = self.f.gradient(x) + self.alpha * h_grad + self.gamma * hx * h_grad

    return myGradient

def hessian(self, x: np.array):
    """Hessian of the augmented Lagrangian at x.

    d2A(x) = d2f(x) + alpha * d2h(x) + gamma * (h(x) * d2h(x) + dh(x) dh(x)^T)

    (Stale `myHessian = MISSING` diff-residue line removed; the constant
    `0.5 * gamma * 2` factors are simplified and repeated h evaluations
    hoisted — algebraically identical to the original.)

    NOTE(review): `h_grad * h_grad.T` is an elementwise product when
    dh(x) is a 1-D numpy array, not the outer product the formula
    suggests — presumably dh(x) is a column vector here; confirm against
    the h objects used by the callers.
    """
    hx = self.h.objective(x)
    h_grad = self.h.gradient(x)
    myHessian = (self.f.hessian(x) + self.alpha * self.h.hessian(x)
                 + self.gamma * (hx * self.h.hessian(x) + h_grad * h_grad.T))

    return myHessian

0 comments on commit a7edfa9

Please sign in to comment.