
Commit

no-op _add_instantiators
ordabayevy committed Jul 4, 2024
1 parent e8163ee commit 02443b1
Showing 2 changed files with 19 additions and 0 deletions.
5 changes: 5 additions & 0 deletions cellarium/ml/cli.py
@@ -275,6 +275,11 @@ def __init__(self, args: ArgsType = None) -> None:
args=args,
)

def _add_instantiators(self) -> None:
# disable the breaking change to dependency injection support introduced in PyTorch Lightning 2.3
# https://github.com/Lightning-AI/pytorch-lightning/pull/18105
pass

def instantiate_classes(self) -> None:
with torch.device("meta"):
# skip the initialization of model parameters
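For reference, the same workaround can be sketched in isolation. This is a minimal, hedged example assuming a `LightningCLI` subclass; the class name `MyCLI` is illustrative and not part of this commit:

```python
from lightning.pytorch.cli import LightningCLI


class MyCLI(LightningCLI):
    def _add_instantiators(self) -> None:
        # No-op override: skip registering the custom class instantiators
        # that PyTorch Lightning 2.3 introduced
        # (Lightning-AI/pytorch-lightning#18105), so jsonargparse keeps its
        # pre-2.3 dependency injection behavior.
        pass
```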
14 changes: 14 additions & 0 deletions cellarium/ml/core/module.py
@@ -61,6 +61,12 @@ def __init__(
self.save_hyperparameters(logger=False)
self.pipeline: CellariumPipeline | None = None

if optim_fn is None:
# Starting from PyTorch Lightning 2.3, automatic optimization does not allow returning None
# from the training step. https://github.com/Lightning-AI/pytorch-lightning/pull/19918
# Thus, we need to use manual optimization for the no-optimizer case.
self.automatic_optimization = False

def configure_model(self) -> None:
"""
.. note::
@@ -156,6 +162,14 @@ def training_step(  # type: ignore[override]
if loss is not None:
# Logging to TensorBoard by default
self.log("train_loss", loss)

if not self.automatic_optimization:
# Note that calling .step() is necessary to increment the global step, even though
# no backpropagation is performed.
no_optimizer = self.optimizers()
assert isinstance(no_optimizer, pl.core.optimizer.LightningOptimizer)
no_optimizer.step()

return loss

def forward(self, batch: dict[str, np.ndarray | torch.Tensor]) -> dict[str, np.ndarray | torch.Tensor]:
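As a self-contained illustration of the manual-optimization pattern above, the sketch below uses a dummy parameter and a zero-learning-rate SGD optimizer so that `self.optimizers().step()` has something to act on; both are assumptions for this example, not the commit's actual `configure_optimizers`:

```python
import lightning.pytorch as pl
import torch


class NoOptimizerModule(pl.LightningModule):
    """Illustrative module that runs a training loop without a real optimizer."""

    def __init__(self) -> None:
        super().__init__()
        # Manual optimization: Lightning no longer calls loss.backward() or
        # optimizer.step() for us, so training_step may return None.
        self.automatic_optimization = False
        # Dummy parameter (an assumption for this sketch) so that
        # configure_optimizers has something to hand to the optimizer.
        self._dummy = torch.nn.Parameter(torch.zeros(1))

    def configure_optimizers(self) -> torch.optim.Optimizer:
        # lr=0.0 makes the step a no-op for the parameter itself; the call
        # still increments Lightning's global step counter.
        return torch.optim.SGD([self._dummy], lr=0.0)

    def training_step(self, batch: dict, batch_idx: int) -> None:
        # ... run the optimizer-free pipeline on `batch` here ...
        opt = self.optimizers()
        opt.step()  # advances trainer.global_step without backpropagation
```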
