Commit

Merge pull request #434 from datamol-io/default_wandb
Disable wandb by default + complete integration tests
s-maddrellmander authored Aug 16, 2023
2 parents 6c0787c + 737fe92 commit d97f80c
Showing 3 changed files with 71 additions and 5 deletions.
13 changes: 13 additions & 0 deletions expts/hydra-configs/experiment/toymix_mpnn.yaml
@@ -0,0 +1,13 @@
+# @package _global_
+
+constants:
+  name: neurips2023_small_data_mpnn
+  entity: "multitask-gnn"
+  seed: 42
+  max_epochs: 100
+  data_dir: expts/data/neurips2023/small-dataset
+  raise_train_error: true
+
+trainer:
+  model_checkpoint:
+    dirpath: models_checkpoints/neurips2023-small-mpnn/
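
For context: the `# @package _global_` directive merges these keys at the root of the composed config when the experiment is selected. A minimal sketch of composing it programmatically, assuming the repository's `main` config name and the standard Hydra `+experiment=` override pattern (the same compose API the new test below uses):

    import hydra

    with hydra.initialize(version_base=None, config_path="expts/hydra-configs"):
        # Sketch only: the +experiment override merges toymix_mpnn.yaml
        # into the root of the composed config.
        cfg = hydra.compose(config_name="main", overrides=["+experiment=toymix_mpnn"])
        print(cfg.constants.name)  # neurips2023_small_data_mpnn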
13 changes: 8 additions & 5 deletions graphium/cli/train_finetune.py
@@ -44,13 +44,16 @@ def run_training_finetuning(cfg: DictConfig) -> None:
 
     st = timeit.default_timer()
 
+    # Disable wandb if the user is not logged in.
     wandb_cfg = cfg["constants"].get("wandb")
-    if wandb_cfg is not None:
-        wandb.init(
-            entity=wandb_cfg["entity"],
-            project=wandb_cfg["project"],
-            config=cfg,
+    if wandb.login() is False:
+        logger.info(
+            "Not logged in to wandb - disabling wandb logging.\n"
+            + "To enable wandb, run `wandb login` from the command line."
         )
+        wandb.init(mode="disabled")
+    elif wandb_cfg is not None:
+        wandb.init(config=cfg, **wandb_cfg)
 
     ## == Instantiate all required objects from their respective configs ==
     # Accelerator
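
In short, the patch gates wandb on login status rather than on the mere presence of a `wandb` config section. A standalone sketch of the resulting logic, assuming `wandb.login()` returns False when no API key is available, and with a hypothetical helper name and a loguru `logger` standing in for the module's own:

    import wandb
    from loguru import logger  # assumed; the module's actual logger may differ

    def maybe_init_wandb(cfg: dict) -> None:  # hypothetical helper name
        wandb_cfg = cfg["constants"].get("wandb")
        if wandb.login() is False:
            # No API key configured: start a no-op run so later wandb
            # calls are safe but nothing is logged.
            logger.info("Not logged in to wandb - disabling wandb logging.")
            wandb.init(mode="disabled")
        elif wandb_cfg is not None:
            # Logged in and a wandb section exists: forward its keys
            # (entity, project, ...) straight to wandb.init.
            wandb.init(config=cfg, **wandb_cfg)

Note that when the user is logged in but the config has no `wandb` section, neither branch calls `wandb.init`, so no run is created.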
50 changes: 50 additions & 0 deletions tests/test_ipu_training.py
@@ -0,0 +1,50 @@
+import hydra
+from graphium.cli.train_finetune import run_training_finetuning
+import pytest
+
+FINETUNING_CONFIG_KEY = "finetuning"
+
+
+@pytest.mark.parametrize("acc_type, acc_prec", [("cpu", 32), ("ipu", 16)])
+@pytest.mark.ipu
+def test_cli(acc_type, acc_prec) -> None:
+    """
+    Test the main CLI endpoint for training and fine-tuning Graphium models.
+    """
+    with hydra.initialize(version_base=None, config_path="../expts/hydra-configs"):
+        # config is relative to a module
+        cfg = hydra.compose(
+            config_name="main",
+            overrides=[
+                f"accelerator={acc_type}",
+                "tasks=toymix",
+                "training=toymix",
+                # Reduce the number of parameters in the toymix architecture
+                "architecture=toymix",
+                "architecture.pe_encoders.encoders.la_pos.hidden_dim=16",
+                "architecture.pe_encoders.encoders.la_pos.num_layers=1",
+                "architecture.pe_encoders.encoders.rw_pos.hidden_dim=16",
+                "architecture.pe_encoders.encoders.rw_pos.num_layers=1",
+                "architecture.pre_nn.hidden_dims=32",
+                "architecture.pre_nn.depth=1",
+                "architecture.pre_nn.out_dim=16",
+                "architecture.gnn.in_dim=16",
+                "architecture.gnn.out_dim=16",
+                "architecture.gnn.depth=2",
+                "architecture.task_heads.qm9.depth=1",
+                "architecture.task_heads.tox21.depth=1",
+                "architecture.task_heads.zinc.depth=1",
+                # Set the number of epochs
+                "constants.max_epochs=2",
+                "+datamodule.args.task_specific_args.qm9.sample_size=1000",
+                "+datamodule.args.task_specific_args.tox21.sample_size=1000",
+                "+datamodule.args.task_specific_args.zinc.sample_size=1000",
+                "trainer.trainer.check_val_every_n_epoch=1",
+                f"trainer.trainer.precision={acc_prec}",  # 32 for CPU, 16 for IPU (parametrized above)
+            ],
+        )
+        if acc_type == "ipu":
+            cfg["accelerator"]["ipu_config"].append("useIpuModel(True)")
+            cfg["accelerator"]["ipu_inference_config"].append("useIpuModel(True)")
+
+        run_training_finetuning(cfg)
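
Usage note: because `@pytest.mark.ipu` decorates the whole function, both the cpu and ipu parametrizations carry the marker (the custom marker is assumed to be registered in the project's pytest configuration). A minimal programmatic invocation:

    import pytest

    # Select only tests carrying the `ipu` marker; since the marker is on
    # test_cli itself, this runs both the cpu and ipu cases. The
    # useIpuModel(True) option above runs the ipu case on an emulated IPU,
    # so no physical IPU hardware is required.
    pytest.main(["tests/test_ipu_training.py", "-m", "ipu", "-v"])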
