-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path: train.py
More file actions
39 lines (29 loc) · 1017 Bytes
/
train.py
File metadata and controls
39 lines (29 loc) · 1017 Bytes
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
import hydra
from hydra.utils import instantiate
from omegaconf import DictConfig, OmegaConf
import pytorch_lightning as pl
@hydra.main(config_path='conf', config_name='config')
def main(cfg: DictConfig) -> None:
    """Train (and optionally test) a model fully described by a Hydra config.

    Instantiates the model pipeline, loggers, callbacks, trainer, and
    datamodule from ``cfg``, runs ``trainer.fit``, and — when
    ``cfg.run_test`` is truthy — ``trainer.test`` afterwards.

    Args:
        cfg: Hydra-composed configuration. Keys read here: ``seed``,
            ``pipeline``, ``pl_trainer``, ``logging``, ``callbacks``,
            ``dataset``, ``run_test``.
    """
    # HACK: reaches into PyTorch Lightning's private ``_logger`` to drop its
    # handlers and let records propagate to Hydra's root logging config,
    # avoiding duplicated log lines. Private API — verify on PL upgrades.
    pl._logger.handlers = []
    pl._logger.propagate = True

    if cfg.seed is not None:
        pl.seed_everything(cfg.seed)

    # The pipeline receives the full config so it can read nested settings.
    model = instantiate(cfg.pipeline, cfg=cfg)

    # Start from the static trainer kwargs, then attach any configured
    # loggers and callbacks (each entry is itself an instantiable config).
    cfg_trainer = dict(cfg.pl_trainer)
    if cfg.logging:
        cfg_trainer['logger'] = [instantiate(cfg_log) for cfg_log in cfg.logging]
    if cfg.callbacks:
        cfg_trainer['callbacks'] = [instantiate(cfg_cb) for cfg_cb in cfg.callbacks]
    trainer = pl.Trainer(**cfg_trainer)

    datamodule = instantiate(cfg.dataset)
    # Pass the datamodule by keyword: keeps fit() consistent with test()
    # and immune to positional-argument reshuffles across PL versions.
    trainer.fit(model, datamodule=datamodule)
    if cfg.run_test:
        trainer.test(model, datamodule=datamodule)


if __name__ == '__main__':
    main()