| author | Jules Laplace <julescarbon@gmail.com> | 2018-05-15 00:25:23 +0200 |
|---|---|---|
| committer | Jules Laplace <julescarbon@gmail.com> | 2018-05-15 00:25:23 +0200 |
| commit | 0b2ae54eb620b3f70e082cb46ffade8571184d04 (patch) | |
| tree | 510860b29d4cf2b00b3016894ec29ecf40828483 | |
| parent | c623e761920c594c0bacb2cd3e4cab6718f3028b (diff) | |
lol typo
| -rw-r--r-- | model.py | 8 |
1 file changed, 6 insertions, 2 deletions
@@ -325,9 +325,9 @@ class PrimedGenerator(Runner):
         tmp_sequences = torch.LongTensor(n_seqs, n_samples).fill_(utils.q_zero(self.model.q_levels))
         q_levels = self.model.q_levels
-        q_width = q_levels >> 4
+        q_width = 64
         q_min = q_width
-        q_max = q_levels - q_width
+        q_max = q_levels - (q_width*2)
         print("_______-___-_---_-____")
         print("_____________--_-_-_______")
@@ -375,6 +375,10 @@ class PrimedGenerator(Runner):
                 b = sub_sequence_b[:, j].float() * ratio
                 tmp_sub_sequence[:, j] = torch.clamp(a + b, 1, q_levels-1).long()
+                if i == 20:
+                    print(a)
+                    print(b)
+                    print(tmp_sub_sequence)
             return tmp_sub_sequence
         for i in range(self.model.lookback, self.model.lookback + seq_len):
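For context, the first hunk replaces the derived width `q_levels >> 4` with a hard-coded `q_width = 64` and tightens `q_max` to `q_levels - (q_width*2)`, while the second hunk only adds debug prints around the crossfade of two quantized sub-sequences. A minimal sketch of that crossfade step is below. It is an illustration rather than the repository's code: `q_levels = 256`, the frame size, and the `(1 - ratio)` weighting of `sub_sequence_a` are assumptions, since the diff only shows the `b` term and the clamp.

```python
import torch

# Hypothetical stand-ins for values the diff does not show.
q_levels = 256                      # assumed quantization depth
q_width = 64                        # the commit hard-codes 64 in place of q_levels >> 4
q_min = q_width
q_max = q_levels - (q_width * 2)

n_seqs, frame_size = 2, 16          # assumed shapes, for illustration only
sub_sequence_a = torch.randint(q_min, q_max, (n_seqs, frame_size))
sub_sequence_b = torch.randint(q_min, q_max, (n_seqs, frame_size))
tmp_sub_sequence = torch.zeros(n_seqs, frame_size, dtype=torch.long)

for j in range(frame_size):
    ratio = j / frame_size                            # assumed linear crossfade position
    a = sub_sequence_a[:, j].float() * (1 - ratio)    # assumed complementary weight for a
    b = sub_sequence_b[:, j].float() * ratio          # matches the diff's b term
    # Clamp back into the valid quantization range, as in the diff.
    tmp_sub_sequence[:, j] = torch.clamp(a + b, 1, q_levels - 1).long()

print(tmp_sub_sequence)
```

The `if i == 20:` block added in the second hunk just prints `a`, `b`, and `tmp_sub_sequence` for one value of the outer loop index; it does not change the generation path.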
