-
Notifications
You must be signed in to change notification settings - Fork 159
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Dualspace update #2294
Dualspace update #2294
Changes from 1 commit
c8f8ff7
0b56ea4
576d22f
6a1ce4e
afa7f6a
352a3fe
056946b
50724d3
bed449e
febc6bc
5f3b5be
88253ce
fec4a8a
c3e1f63
f0511e9
3679674
0f073f2
87188ad
a04c3c5
b769f44
f9229d8
1c7c56b
8f71503
8bdc83f
4bd8412
61eb6ef
a84b3f6
82d8557
9fa651e
57e7795
f4f9c61
53de7c1
930082e
86c7943
0e70cc9
9186f4c
e38f2bd
58c42ec
44bf5bb
3d7f5d6
cad1509
43cfe2f
94cc748
abb1eaf
f833e78
edbc52f
4a93f46
cd278c0
2f2cd66
fee731e
b7651d9
ada1ce4
3f7ed62
0f025be
33ea7c4
e4743b8
f3d252b
40f29d5
7a7b60f
08b9ef6
fdb7a25
757826a
a51f5a5
677a717
0760cf5
9002332
632e930
b1717d2
2a312d1
773efeb
7573942
12ad7f9
c92ddfa
31be19a
2c4d74f
2a8773c
9615a93
34adae7
22a698f
5538081
45de28c
1807422
fdd0016
fda7940
09f6b22
f82f748
713f388
a47195b
26984a0
3e1ce95
e61b79c
0bec966
d3c0ec8
7d3edd1
e5f5331
621478c
e34af65
e90d257
81eccd4
b5ae54e
f7604d1
8d58ede
61f235a
95d72d0
ef9b314
5d0a9c7
17434ca
ba224b9
bffc2f8
b634289
4afc69f
6d510a3
43731d7
22a77ee
9a4ea31
3a8b42b
c901874
e480e7f
20f115e
ffa228e
dca4695
9e0cdf5
d573ffd
9a62d94
e561bcd
0c543ca
88149f6
f4dcc54
631ed9e
680b223
5b6dc24
b5b8fe9
8f8de5b
8d18cfd
d5ca8e6
921d4a4
a8018b8
1ccde41
27728ed
9bd6c75
a7d4932
2bb9d5b
7c0a721
4cf92f3
9728463
6023cee
2cf9863
7b877d8
a235aa7
13d3722
db31d7e
a42f7fd
f57354e
9a0ab79
75c2056
c188c16
436aef0
62ebd19
93e65a2
07f4628
623917b
8f5da43
49f4a13
e54db96
e3727fa
f537eaf
7f37ca0
c5aff03
bcd0185
4c0258c
5e85161
fda4a7e
9cb03d7
d3d3694
f2bf456
291cd99
6935f33
0b8a0c2
4bda0d0
c550838
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -54,7 +54,7 @@ def evaluate_adj_component(self, inputs, adj_inputs, block_variable, idx, | |
if isinstance(block_variable.output, AdjFloat): | ||
try: | ||
# Adjoint of a broadcast is just a sum | ||
return adj_inputs[0].vector().sum() | ||
return adj_inputs[0].dat.data_ro.sum() | ||
except AttributeError: | ||
# Catch the case where adj_inputs[0] is just a float | ||
return adj_inputs[0] | ||
|
@@ -67,8 +67,7 @@ def evaluate_adj_component(self, inputs, adj_inputs, block_variable, idx, | |
adj_output = self.backend.Function( | ||
block_variable.output.function_space()) | ||
adj_output.assign(prepared) | ||
adj_output = self.backend.Cofunction(adj_output.function_space().dual(), | ||
val=adj_output.vector()) | ||
adj_output = adj_output.riesz_representation(riesz_map="l2") | ||
return adj_output | ||
else: | ||
# Linear combination | ||
|
@@ -82,8 +81,7 @@ def evaluate_adj_component(self, inputs, adj_inputs, block_variable, idx, | |
) | ||
# Firedrake does not support assignment of conjugate functions | ||
adj_output.interpolate(ufl.conj(diff_expr)) | ||
adj_output = self.backend.Cofunction(adj_output.function_space().dual(), | ||
val=adj_output.vector()) | ||
adj_output = adj_output.riesz_representation(riesz_map="l2") | ||
else: | ||
mesh = adj_output.function_space().mesh() | ||
diff_expr = ufl.algorithms.expand_derivatives( | ||
|
@@ -94,7 +92,7 @@ def evaluate_adj_component(self, inputs, adj_inputs, block_variable, idx, | |
) | ||
) | ||
adj_output.assign(diff_expr) | ||
return adj_output.vector().inner(adj_input_func.vector()) | ||
return adj_output.dat.inner(adj_input_func.dat) | ||
|
||
if self.compat.isconstant(block_variable.output): | ||
R = block_variable.output._ad_function_space( | ||
|
@@ -109,14 +107,14 @@ def _adj_assign_constant(self, adj_output, constant_fs): | |
shape = r.ufl_shape | ||
if shape == () or shape[0] == 1: | ||
# Scalar Constant | ||
r.vector()[:] = adj_output.vector().sum() | ||
r.dat.data[:] = adj_output.dat.data_ro.sum() | ||
else: | ||
# We assume the shape of the constant == shape of the output | ||
# function if not scalar. This assumption is due to FEniCS not | ||
# supporting products with non-scalar constants in assign. | ||
values = [] | ||
for i in range(shape[0]): | ||
values.append(adj_output.sub(i, deepcopy=True).vector().sum()) | ||
values.append(adj_output.sub(i, deepcopy=True).dat.data_ro.sum()) | ||
r.assign(self.backend.Constant(values)) | ||
return r | ||
|
||
|
@@ -139,7 +137,7 @@ def evaluate_tlm_component(self, inputs, tlm_inputs, block_variable, idx, | |
dudmi.assign(ufl.algorithms.expand_derivatives( | ||
ufl.derivative(expr, dep.saved_output, | ||
dep.tlm_value))) | ||
dudm.vector().axpy(1.0, dudmi.vector()) | ||
dudm.dat += 1.0 * dudmi.dat | ||
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. I've never seen direct operations on a `Dat`. There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. This is exactly what |
||
|
||
return dudm | ||
|
||
|
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Nitpicking, but I think this should be
`r.dat.data_wo[:]`
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
We are modifying the value of `r`, so why should we use `dat.data_ro`? `.dat.data` needs to be used whenever you change the value of the `.dat`, right?
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Apologies for the confusion — I meant to say `dat.data_wo` and hastily edited the comment after making it, but clearly it didn't make it to you in time! I think `dat.data_wo` is correct here since you don't read the value.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Well, `Dat.data_wo` is the same as `Dat.data` (see https://github.com/OP2/PyOP2/blame/master/pyop2/types/dat.py#L223). Also, we use `Dat.data` when changing the value of the `Dat` all over the code, so I don't think that is a problem.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Well, I know we've been trying to move towards using the correct access descriptors wherever we ought to. Whether or not the implementation is different, @connorjward can comment on that. It's just a small change, but I think it's a correct one. If you're really not convinced, then let @connorjward weigh in.