train_denoiser.py
import os

import torch
import hydra
import lightning.pytorch as pl
from lightning.pytorch.callbacks import LearningRateMonitor

from puzzlefusion_plusplus.denoiser.dataset.dataset import build_geometry_dataloader

def init_callbacks(cfg):
    checkpoint_monitor = hydra.utils.instantiate(cfg.checkpoint_monitor)
    lr_monitor = LearningRateMonitor(logging_interval="epoch")
    # print_callback = PrintCallback()
    return [checkpoint_monitor, lr_monitor]
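
# For reference, `cfg.checkpoint_monitor` is expected to be a Hydra node that
# `hydra.utils.instantiate` resolves to a Lightning callback. An illustrative
# (assumed, not project-verified) config using Lightning's ModelCheckpoint:
#
#   checkpoint_monitor:
#     _target_: lightning.pytorch.callbacks.ModelCheckpoint
#     dirpath: ${experiment_output_path}/training
#     monitor: val/loss
#     save_top_k: 3
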
@hydra.main(version_base=None, config_path="config/denoiser", config_name="global_config")
def main(cfg):
    # fix the seed for reproducibility
    pl.seed_everything(cfg.train_seed, workers=True)

    # create directories for training outputs
    os.makedirs(os.path.join(cfg.experiment_output_path, "training"), exist_ok=True)

    # initialize data
    train_loader, val_loader = build_geometry_dataloader(cfg)

    # initialize model
    model = hydra.utils.instantiate(cfg.model.model_name, cfg)

    # optionally load pretrained encoder weights, stripping the autoencoder's
    # 'ae.' key prefix so the keys match model.encoder's parameter names
    if cfg.model.encoder_weights_path is not None:
        encoder_weights = torch.load(cfg.model.encoder_weights_path)['state_dict']
        model.encoder.load_state_dict({k.replace('ae.', ''): v for k, v in encoder_weights.items()})
        # freeze the encoder (its weights are not updated during training)
        for param in model.encoder.parameters():
            param.requires_grad = False
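    # Note: load_state_dict defaults to strict=True, so the renamed keys must
    # match model.encoder exactly. If the checkpoint was saved on GPU, passing
    # map_location='cpu' to torch.load avoids device errors on CPU-only hosts.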
    # initialize logger
    logger = hydra.utils.instantiate(cfg.logger)

    # initialize callbacks
    callbacks = init_callbacks(cfg)

    # initialize trainer
    trainer = pl.Trainer(
        callbacks=callbacks,
        logger=logger,
        **cfg.trainer
    )
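    # `cfg.trainer` is unpacked directly into pl.Trainer, so it should contain
    # standard Trainer keyword arguments. An illustrative (assumed) node:
    #
    #   trainer:
    #     max_epochs: 200
    #     accelerator: gpu
    #     devices: 1
    #     check_val_every_n_epoch: 1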
    # validate the resume checkpoint path, if one is given
    if cfg.ckpt_path is not None:
        assert os.path.exists(cfg.ckpt_path), "Error: Checkpoint path does not exist."

    # start (or resume) training
    trainer.fit(
        model=model,
        train_dataloaders=train_loader,
        val_dataloaders=val_loader,
        ckpt_path=cfg.ckpt_path
    )

if __name__ == '__main__':
    main()
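
# Example launches using Hydra's CLI override syntax (the override keys mirror
# fields this script reads; the values and checkpoint path are assumptions):
#   python train_denoiser.py train_seed=42 ckpt_path=null
#   python train_denoiser.py ckpt_path=output/denoiser/training/last.ckpt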