
Merge pull request #481 from datamol-io/finetuning
Bug fix
DomInvivo authored Oct 25, 2023
2 parents 870cc86 + 4670274 commit 2883d88
Showing 4 changed files with 11 additions and 9 deletions.
graphium/config/dummy_finetuning_from_gnn.yaml (1 addition, 1 deletion)

@@ -98,7 +98,7 @@ metrics:
       target_nan_mask: null
       multitask_handling: mean-per-label
     - name: r2_score
-      metric: r2
+      metric: r2_score
       target_nan_mask: null
       multitask_handling: mean-per-label
       threshold_kwargs: null
graphium/config/dummy_finetuning_from_task_head.yaml (1 addition, 1 deletion)

@@ -104,7 +104,7 @@ metrics:
       target_nan_mask: null
       multitask_handling: mean-per-label
     - name: r2_score
-      metric: r2
+      metric: r2_score
       target_nan_mask: null
       multitask_handling: mean-per-label
       threshold_kwargs: null
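Both config hunks make the same one-line fix: the `metric:` key of the r2_score entry changes from `r2` to `r2_score`, presumably so the name resolves to the R² metric that torchmetrics exposes under `r2_score` (an assumption; the commit message only says "Bug fix"). A minimal standalone check of that torchmetrics name, not graphium code:

    # Standalone sanity check: torchmetrics names the functional R-squared
    # metric `r2_score`, the value the fixed configs now use.
    import torch
    from torchmetrics.functional import r2_score

    preds = torch.tensor([2.5, 0.0, 2.0, 8.0])
    target = torch.tensor([3.0, -0.5, 2.0, 7.0])
    print(r2_score(preds, target))  # tensor(0.9486)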
graphium/finetuning/finetuning.py (3 additions, 1 deletion)

@@ -73,7 +73,9 @@ def freeze_module(self, pl_module, module_name: str, module_map: Dict[str, Union
 
         # We only partially freeze the finetuning module
         if module_name.startswith(self.finetuning_module):
-            if self.training_depth > 0:
+            if self.training_depth == 0:
+                pass
+            else:
                 modules = modules[: -self.training_depth]
 
         self.freeze(modules=modules, train_bn=self.train_bn)
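For context on the `freeze_module` hunk: `modules[: -self.training_depth]` trims the last `training_depth` sub-modules off the freeze list so that they stay trainable, and the depth-0 case has to bypass the slice entirely, because a Python `[:-0]` slice is `[:0]` and yields an empty list, i.e. nothing would be frozen. A minimal sketch with hypothetical module names, not graphium's actual module map:

    # Minimal sketch (hypothetical names): trimming the freeze list so the last
    # `training_depth` layers of the finetuning module remain trainable.
    modules = ["layer_0", "layer_1", "layer_2", "layer_3"]

    training_depth = 2
    print(modules[:-training_depth])   # ['layer_0', 'layer_1'] -> only these are frozen

    training_depth = 0
    # modules[:-training_depth] would be modules[:0] == [], freezing nothing,
    # which is why the code keeps the full list when training_depth == 0.
    print(modules[:-training_depth])   # []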
graphium/nn/pyg_layers/gps_pyg.py (6 additions, 6 deletions)

@@ -259,18 +259,18 @@ def forward(self, batch: Batch) -> Batch:
         if self.node_residual:
             if self.layer_depth < 1:
                 h_local = self.residual_add(h_local, feat_in)
-                h_local *= 1/self.scale_activations(h_local, self.output_scale)
+                h_local *= 1 / self.scale_activations(h_local, self.output_scale)
             else:
-                h_local *= 1/self.scale_activations(h_local, self.output_scale)
-                h_local = self.residual_add(h_local, feat_in)
+                h_local *= 1 / self.scale_activations(h_local, self.output_scale)
+                h_local = self.residual_add(h_local, feat_in)
         # Apply the residual connection for the edge features and scale the activations by some value to help reduce activation growth
         if self.edge_residual and self.use_edges:
             if self.layer_depth < 1:
                 e_local = self.residual_add(e_local, edges_feat_in)
-                e_local *= 1/self.scale_activations(e_local, self.output_scale)
+                e_local *= 1 / self.scale_activations(e_local, self.output_scale)
             else:
-                e_local *= 1/self.scale_activations(e_local, self.output_scale)
-                e_local = self.residual_add(e_local, edges_feat_in)
+                e_local *= 1 / self.scale_activations(e_local, self.output_scale)
+                e_local = self.residual_add(e_local, edges_feat_in)
 
         if self.norm_layer_local is not None:
             h_local = self.norm_layer_local(h_local)
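The gps_pyg.py hunk is a whitespace-only reformat (spaces around the division), but the surrounding logic is worth spelling out: at the first layer the residual is added before rescaling, while at deeper layers the activations are rescaled before the residual is added, which the in-code comment says helps reduce activation growth. A standalone sketch of the two orderings, with a stand-in `scale_activations` and plain tensors rather than graphium's layer internals:

    # Standalone sketch of the residual/scaling orderings shown above.
    # `scale_activations` here is a placeholder divisor, not graphium's implementation.
    import torch


    def scale_activations(feat: torch.Tensor, output_scale: float) -> float:
        # Placeholder: a constant factor used to damp activation growth.
        return max(output_scale, 1.0)


    def residual_add(feat: torch.Tensor, feat_in: torch.Tensor) -> torch.Tensor:
        return feat + feat_in


    feat_in, h_local = torch.randn(8, 16), torch.randn(8, 16)
    output_scale, layer_depth = 2.0, 0

    if layer_depth < 1:
        # First layer: add the residual, then rescale the result.
        h_local = residual_add(h_local, feat_in)
        h_local *= 1 / scale_activations(h_local, output_scale)
    else:
        # Deeper layers: rescale first, then add the residual.
        h_local *= 1 / scale_activations(h_local, output_scale)
        h_local = residual_add(h_local, feat_in)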
