From f76afe8416d84c50742836049eaf6598b3c2b964 Mon Sep 17 00:00:00 2001
From: Jules Laplace
Date: Tue, 15 May 2018 00:15:41 +0200
Subject: lol typo

---
 model.py | 11 +++++++++--
 1 file changed, 9 insertions(+), 2 deletions(-)

(limited to 'model.py')

diff --git a/model.py b/model.py
index 62999c9..6c1b629 100644
--- a/model.py
+++ b/model.py
@@ -325,8 +325,9 @@ class PrimedGenerator(Runner):
         tmp_sequences = torch.LongTensor(n_seqs, n_samples).fill_(utils.q_zero(self.model.q_levels))
 
         q_levels = self.model.q_levels
-        q_min = 0
-        q_max = q_levels
+        q_width = q_levels >> 4
+        q_min = q_width
+        q_max = q_levels - q_width
 
         print("_______-___-_---_-____")
         print("_____________--_-_-_______")
@@ -343,11 +344,17 @@ class PrimedGenerator(Runner):
             for i in range(n_samples):
                 x[:, i] = int((math.sin(i/44100 * primer_freq) + 1) / 2 * (q_max - q_min) + q_min)
             return x
+        def _saw(x):
+            primer_freq = float(prime_param_a)
+            for i in range(n_samples):
+                x[:, i] = int(((i/44100 * primer_freq) % 1.0) * (q_max - q_min) + q_min)
+            return x
 
         sequence_lookup = {
             'zero': lambda x: x.fill_(utils.q_zero(self.model.q_levels)),
             'noise': _noise,
             'sin': _sin,
+            'saw': _saw,
         }
 
         sequences = sequence_lookup.get(primer, 'zero')(sequences)
-- 
cgit v1.2.3-70-g09d2