We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent a3ec0aa commit b824c01 — Copy full SHA for b824c01
pyadjoint/optimization/rol_solver.py
@@ -20,13 +20,11 @@ def value(self, x, tol):
20
return self._val
21
22
def gradient(self, g, x, tol):
23
- opts = {"riesz_representation": x.inner_product}
24
- self.deriv = self.rf.derivative(options=opts)
+ self.deriv = self.rf.derivative(apply_riesz=True)
25
g.dat = Enlist(self.deriv)
26
27
def hessVec(self, hv, v, x, tol):
28
29
- hessian_action = self.rf.hessian(v.dat, options=opts)
+ hessian_action = self.rf.hessian(v.dat, apply_riesz=True)
30
hv.dat = Enlist(hessian_action)
31
32
def update(self, x, flag, iteration):
@@ -78,9 +76,8 @@ def scale(self, alpha):
78
76
79
77
def riesz_map(self, derivs):
80
dat = []
81
- opts = {"riesz_representation": self.inner_product}
82
for deriv in Enlist(derivs):
83
- dat.append(deriv._ad_convert_type(deriv, options=opts))
+            dat.append(deriv.riesz_representation(riesz_map=self.inner_product))
84
return dat
85
86
def dot(self, yy):
0 commit comments