train_iros.py
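"""Train an iFlow invertible-flow limit-cycle model on an IROS dataset trajectory.

The script couples a LinearLimitCycle latent dynamic with a stack of coupling-layer
flows, optimizes both with Adamax via `cycle_dynamics_train`, reports the cycle
log-likelihood on the training trajectory after every epoch, and writes one
vector-field plot per epoch to the `experiments/` folder next to this file.
"""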
import os, sys
import torch
import torch.optim as optim
from iflow.dataset import iros_dataset
from torch.utils.data import DataLoader
from iflow.utils import makedirs
from iflow import model
from iflow.trainers import cycle_dynamics_train
from iflow.utils.generic import to_torch
import numpy as np
from iflow.visualization import visualize_vector_field, visualize_2d_generated_trj, save_vector_field
from iflow.test_measures.log_likelihood import cycle_log_likelihood
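
## model / data hyperparameters ##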
percentage = .99
batch_size = 100
depth = 15

## optimization ##
lr = 0.001
weight_decay = 0.

## training variables ##
nr_epochs = 1000
filename = 'RShape'
save_folder = 'experiments'

dir_save = os.path.join(os.path.dirname(__file__), save_folder)
makedirs(dir_save)

######### GPU/ CPU #############
#device = torch.device('cuda:' + str(args.gpu) if torch.cuda.is_available() else 'cpu')
device = torch.device('cpu')
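# To train on a GPU, switch to the commented-out CUDA selection above
# (replace `args.gpu` with a device index; this script defines no argparse arguments).
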
#### Invertible Flow model #####
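# Flow architecture: `depth` blocks of (ResNet coupling layer -> random permutation ->
# LU-decomposed linear layer), closed by one final coupling layer.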
def main_layer(dim):
    return model.ResNetCouplingLayer(dim)


def create_flow_seq(dim, depth):
    chain = []
    for i in range(depth):
        chain.append(main_layer(dim))
        chain.append(model.RandomPermutation(dim))
        chain.append(model.LULinear(dim))
    chain.append(main_layer(dim))
    return model.SequentialFlow(chain)

if __name__ == '__main__':
    ########## Data Loading #########
    data = iros_dataset.IROS(filename=filename)
    dim = data.dim
    T_period = (2 * np.pi) / data.w

    params = {'batch_size': batch_size, 'shuffle': True}
    dataloader = DataLoader(data.dataset, **params)

    ######### Model #########
    lsd = model.LinearLimitCycle(dim, device, dt=data.dt, T_period=T_period)
    flow = create_flow_seq(dim, depth)
    iflow = model.ContinuousDynamicFlow(dynamics=lsd, model=flow, dim=dim).to(device)

    ########## Optimization ################
    params = list(flow.parameters()) + list(lsd.parameters())
    optimizer = optim.Adamax(params, lr=lr, weight_decay=weight_decay)
    #######################################
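    # Each epoch: one pass over the shuffled minibatches, then a validation pass that
    # logs the cycle log-likelihood on the first training trajectory and saves a
    # vector-field snapshot.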
    for i in range(nr_epochs):
        # Training
        for local_x, local_y in dataloader:
            dataloader.dataset.set_step()
            optimizer.zero_grad()
            loss = cycle_dynamics_train(iflow, local_x, local_y)
            loss.backward(retain_graph=True)
            optimizer.step()

        ## Validation ##
        if i % 1 == 0:
            with torch.no_grad():
                iflow.eval()
                #visualize_2d_generated_trj(data.train_data, iflow, device, fig_number=2)
                #visualize_vector_field(data.train_data, iflow, device, fig_number=3)
                step = 20
                trj = data.train_data[0]
                trj_x0 = to_torch(trj[:-step, :], device)
                trj_x1 = to_torch(trj[step:, :], device)
                phase = to_torch(data.train_phase_data[0][:-step], device)
                cycle_log_likelihood(trj_x0, trj_x1, phase, step, iflow, device)

                fig_name = filename + str(i) + '.png'
                save_filename = os.path.join(dir_save, fig_name)
                save_vector_field(data.train_data, iflow, device, save_fig=save_filename)
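
# Running `python train_iros.py` trains on the hard-coded 'RShape' recording and saves
# one vector-field image per epoch as experiments/RShape<epoch>.png.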