author    junyanz <junyanz@berkeley.edu>    2017-10-06 11:18:52 -0700
committer junyanz <junyanz@berkeley.edu>    2017-10-06 11:18:52 -0700
commit    1a764f00f8fced8696592bee776d2154b1e8e5a1 (patch)
tree      d2d404e613f6eca258b26edb6518fe5269a8a0e6
parent    7800d516596f1a25986b458cddf8b8785bcc7df8 (diff)
fix minor bugs
 models/base_model.py | 4 ++--
 train.py             | 5 ++---
 2 files changed, 4 insertions(+), 5 deletions(-)
diff --git a/models/base_model.py b/models/base_model.py
index 55da1ca..446a903 100644
--- a/models/base_model.py
+++ b/models/base_model.py
@@ -51,8 +51,8 @@ class BaseModel():
         save_filename = '%s_net_%s.pth' % (epoch_label, network_label)
         save_path = os.path.join(self.save_dir, save_filename)
         network.load_state_dict(torch.load(save_path))
-
-    def update_learning_rate():
+    # update learning rate (called once every epoch)
+    def update_learning_rate(self):
         for scheduler in self.schedulers:
             scheduler.step()
         lr = self.optimizers[0].param_groups[0]['lr']
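
For context, a minimal runnable sketch of what the fixed method does now that it takes self: each scheduler steps once per epoch and the current learning rate is read back from the first optimizer. TinyModel, the LambdaLR schedule, and the logging print are illustrative stand-ins for this sketch, not the repository's actual BaseModel or its scheduler setup.

import torch

class TinyModel:
    def __init__(self, lr=0.0002):
        param = torch.nn.Parameter(torch.zeros(1))
        self.optimizers = [torch.optim.Adam([param], lr=lr)]
        # LambdaLR here stands in for however the project builds its schedulers
        self.schedulers = [torch.optim.lr_scheduler.LambdaLR(
            self.optimizers[0], lr_lambda=lambda epoch: 1.0 / (epoch + 1))]

    # update learning rate (called once every epoch)
    def update_learning_rate(self):
        for scheduler in self.schedulers:
            scheduler.step()
        lr = self.optimizers[0].param_groups[0]['lr']
        print('learning rate = %.7f' % lr)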
diff --git a/train.py b/train.py
index 4d80eb6..7d2a5e9 100644
--- a/train.py
+++ b/train.py
@@ -17,6 +17,7 @@ total_steps = 0
 for epoch in range(opt.epoch_count, opt.niter + opt.niter_decay + 1):
     epoch_start_time = time.time()
     epoch_iter = 0
+
     for i, data in enumerate(dataset):
         iter_start_time = time.time()
         total_steps += opt.batchSize
@@ -47,6 +48,4 @@ for epoch in range(opt.epoch_count, opt.niter + opt.niter_decay + 1):
     print('End of epoch %d / %d \t Time Taken: %d sec' %
           (epoch, opt.niter + opt.niter_decay, time.time() - epoch_start_time))
-
-    if epoch > opt.niter:
-        model.update_learning_rate()
+    model.update_learning_rate()
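
And a self-contained sketch of the loop structure after the second hunk: the learning rate is now stepped at the end of every epoch rather than only once epoch exceeds opt.niter. It assumes a model object like the TinyModel sketch above; opt, dataset, and the per-iteration body are toy stand-ins for the script's real options, data loader, and training calls.

import time
from argparse import Namespace

opt = Namespace(epoch_count=1, niter=2, niter_decay=2)   # toy option values
model = TinyModel()            # stand-in for the script's real model object
dataset = [None] * 3           # stand-in for the real data loader

for epoch in range(opt.epoch_count, opt.niter + opt.niter_decay + 1):
    epoch_start_time = time.time()

    for i, data in enumerate(dataset):
        pass                   # real loop runs the per-iteration training step here

    print('End of epoch %d / %d \t Time Taken: %d sec' %
          (epoch, opt.niter + opt.niter_decay, time.time() - epoch_start_time))
    model.update_learning_rate()   # now called unconditionally at every epoch end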