train_time_warp.py
import os

import torch
import hydra
from omegaconf import DictConfig
from pytorch_lightning import Trainer

from utils.config import compose_config_folders
from utils.config import copy_config_to_experiment_folder
from mains.time_warp_main import Spatiotemporal

# Enable autograd anomaly detection for easier debugging of backward errors
torch.autograd.set_detect_anomaly(True)
# Ask Hydra to print full stack traces instead of truncated ones
os.environ['HYDRA_FULL_ERROR'] = '1'


@hydra.main(config_path='configs', config_name='time_warp', version_base='1.1')
def main(cfg: DictConfig) -> None:
    # Prepare the experiment folders and archive the config used for this run
    compose_config_folders(cfg)
    copy_config_to_experiment_folder(cfg)

    # Build the model and warm-start it from the pretrained time-warp weights
    model = Spatiotemporal(cfg)
    model.net.load_state_dict(torch.load('./_pretrained/timewarp.pth', weights_only=True))

    # Fine-tune for a single epoch
    trainer = Trainer(max_epochs=1)
    trainer.fit(model)

    model.save_model()
    model.visualize(save_discretization=True, show_registered=True)
    # Save the surface map as a sample for the inter-surface map:
    # save_model(cfg.checkpointing.checkpoint_path, model.net)
    # Potentially the rotation can also be stored in the map (model.net)
    # so it does not have to be recomputed later.


if __name__ == '__main__':
    main()