summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rwxr-xr-xgen-prime.sh61
-rw-r--r--generate.py20
-rwxr-xr-xget.pl29
-rw-r--r--model.py13
-rw-r--r--trainer/plugins.py7
5 files changed, 109 insertions, 21 deletions
diff --git a/gen-prime.sh b/gen-prime.sh
new file mode 100755
index 0000000..bbdee92
--- /dev/null
+++ b/gen-prime.sh
@@ -0,0 +1,61 @@
+function gen_prime () {
+ exp_name=${1%.*}
+ n_samples=$2
+ sample_length=$3
+ primer=$4
+ primer_a=${5:-"0"}
+ primer_b=${6:-"0"}
+ recursive=${7:-"False"}
+
+ if [ -z "$primer" ]; then echo "primer is unset"; return 1; fi
+
+ sleep 0.1
+ echo ""
+ echo "___________________________________________________"
+ echo ">> generating $exp_name"
+ echo ""
+ python generate.py \
+ --exp $exp_name --dataset $exp_name \
+ --frame_sizes 8 2 \
+ --n_rnn 2 --dim 1024 --q_levels 256 \
+ --seq_len 1024 --batch_size 128 \
+ --val_frac 0.1 --test_frac 0.1 \
+ --sample_rate 44100 \
+ --sample_length $sample_length \
+ --keep_old_checkpoints False \
+ --n_samples $n_samples \
+ --epoch_limit 1 \
+ --primer $primer \
+ --primer_a $primer_a \
+ --primer_b $primer_b \
+ --recursive $recursive \
+ --resume True
+
+ tag="${primer}_${sample_length}"
+ if [ "$primer_a" != "0" ]
+ then
+ tag="${tag}_${primer_a}"
+ fi
+ if [ "$primer_b" != "0" ]
+ then
+ tag="${tag}_${primer_b}"
+ fi
+ if [ "$recursive" = "True" ]
+ then
+ tag="${tag}_recursive"
+ fi
+ ./latest.pl -l $tag $1
+}
+
+function gen_prime_set () {
+ exp_name=$1
+ gen_prime $1 6 44100 'zero'
+ gen_prime $1 6 44100 'noise'
+ gen_prime $1 6 44100 'sin' 440
+ gen_prime $1 6 44100 'noise' 0 0 True
+ gen_prime $1 6 44100 'sin' 440 0 True
+}
+
+gen_prime_set jwcglassbeat
+gen_prime_set neuralacid_solo
+gen_prime_set snaps_grain
diff --git a/generate.py b/generate.py
index c4059ec..f51b586 100644
--- a/generate.py
+++ b/generate.py
@@ -52,7 +52,10 @@ default_params = {
'loss_smoothing': 0.99,
'cuda': True,
'comet_key': None,
- 'primer': ''
+ 'primer': '',
+ 'primer_a': '',
+ 'primer_b': '',
+ 'recursive': True,
}
tag_params = [
@@ -224,7 +227,8 @@ def main(exp, frame_sizes, dataset, **params):
trainer.register_plugin(GeneratorPlugin(
os.path.join(results_path, 'samples'), params['n_samples'],
params['sample_length'], params['sample_rate'],
- params['primer']
+ params['primer'], params['primer_a'], params['primer_b'],
+ params['recursive'],
))
"""
trainer.register_plugin(
@@ -362,7 +366,17 @@ if __name__ == '__main__':
'--comet_key', help='comet.ml API key'
)
parser.add_argument(
- '--primer', help='prime the generator...'
+ '--primer', help='prime the generator with a function. noise, sin, some others..'
+ )
+ parser.add_argument(
+ '--primer_a', help='optional first parameter for primer'
+ )
+ parser.add_argument(
+ '--primer_b', help='optional second parameter for primer'
+ )
+ parser.add_argument(
+ '--recursive', type=parse_bool, default=True,
+ help='generate output recursively'
)
parser.set_defaults(**default_params)
diff --git a/get.pl b/get.pl
index 0ef39db..4b94c22 100755
--- a/get.pl
+++ b/get.pl
@@ -2,6 +2,19 @@
$SIG{TERM} = $SIG{INT} = sub { exit 1 };
+sub sanitize ($) {
+ my $s = lc shift;
+ $s =~ s/\.[^.]+$//g;
+ $s =~ s/\W//g;
+ $s =~ s/\s+/_/g;
+ if (length($s) == 0) {
+ print "fn is empty";
+ exit(1);
+ }
+ $s =~ s/_+/_/g;
+ return $s;
+}
+
chdir('datasets');
my $fn, $new_fn;
@@ -22,19 +35,10 @@ foreach my $s (@ARGV){
}
}
if ($fn) {
- $new_fn = lc $fn;
- $new_fn =~ s/\.flac$//g;
- $new_fn =~ s/\s+/_/g;
- $new_fn =~ s/\W//g;
- if (length($new_fn) == 0) {
- $new_fn = lc $s;
- $new_fn =~ s/\s+/_/g;
- $new_fn =~ s/\W//g;
- }
- $new_fn =~ s/_+/_/g;
+ $new_fn = sanitize($fn);
$new_fn .= '.flac';
system('mv', $fn, $new_fn);
- print"got fn, $fn => $new_fn\n";
+ print "got fn, $fn => $new_fn\n";
system('/usr/bin/perl', 'dataset.pl', $new_fn);
}
}
@@ -44,8 +48,9 @@ foreach my $s (@ARGV){
system('/usr/bin/wget', 'https://neural:spawn5@asdf.us/neural/' . $s);
system('/usr/bin/perl', 'dataset.pl', $s);
}
+ my $tag = sanitize($fn);
open(my $fd, ">>../run_slap.sh");
- print $fd "standard $fn";
+ print $fd "standard $tag\n";
close $fd;
$fn = undef;
diff --git a/model.py b/model.py
index 35317f3..b07b6b8 100644
--- a/model.py
+++ b/model.py
@@ -310,7 +310,7 @@ class PrimedGenerator(Runner):
super().__init__(model)
self.cuda = cuda
- def __call__(self, n_seqs, seq_len, primer):
+ def __call__(self, n_seqs, seq_len, primer, prime_param_a, prime_param_b, recursive):
# generation doesn't work with CUDNN for some reason
torch.backends.cudnn.enabled = False
@@ -321,10 +321,14 @@ class PrimedGenerator(Runner):
sequences = torch.LongTensor(n_seqs, n_samples) # 64-bit int
frame_level_outputs = [None for _ in self.model.frame_level_rnns]
+ if recursive:
+ out_sequences = sequences
+ else:
+ out_sequences = torch.LongTensor(n_seqs, n_samples).fill_(utils.q_zero(self.model.q_levels))
+
q_levels = self.model.q_levels
q_min = 0
q_max = q_levels
- primer_freq = 440
print("_______-___-_---_-____")
print("_____________--_-_-_______")
@@ -337,6 +341,7 @@ class PrimedGenerator(Runner):
for i in xrange(n_samples):
x[:, i] = random.triangular(q_min, q_max)
def sin(x):
+ primer_freq = float(prime_param_a)  # argparse passes strings; sin() needs a number
for i in xrange(n_samples):
x[:, i] = (math.sin(i/44100 * primer_freq) + 1) / 2 * (q_max - q_min) + q_min
@@ -387,8 +392,8 @@ class PrimedGenerator(Runner):
sample_dist = self.model.sample_level_mlp(
prev_samples, upper_tier_conditioning
).squeeze(1).exp_().data
- sequences[:, i] = sample_dist.multinomial(1).squeeze(1)
+ out_sequences[:, i] = sample_dist.multinomial(1).squeeze(1)
torch.backends.cudnn.enabled = True
- return sequences[:, self.model.lookback :]
+ return out_sequences[:, self.model.lookback :]
diff --git a/trainer/plugins.py b/trainer/plugins.py
index 562355e..132c33d 100644
--- a/trainer/plugins.py
+++ b/trainer/plugins.py
@@ -143,13 +143,16 @@ class GeneratorPlugin(Plugin):
pattern = 'd-{}-ep{}-s{}.wav'
- def __init__(self, samples_path, n_samples, sample_length, sample_rate, primer):
+ def __init__(self, samples_path, n_samples, sample_length, sample_rate, primer, primer_a, primer_b, recursive):
super().__init__([(1, 'epoch')])
self.samples_path = samples_path
self.n_samples = n_samples
self.sample_length = sample_length
self.sample_rate = sample_rate
self.primer = primer
+ self.primer_a = primer_a
+ self.primer_b = primer_b
+ self.recursive = recursive
def register(self, trainer):
if self.primer == "":
@@ -158,7 +161,7 @@ class GeneratorPlugin(Plugin):
self.generate = PrimedGenerator(trainer.model.model, trainer.cuda)
def epoch(self, epoch_index):
- samples = self.generate(self.n_samples, self.sample_length, self.primer) \
+ samples = self.generate(self.n_samples, self.sample_length, self.primer, self.primer_a, self.primer_b, self.recursive) \
.cpu().float().numpy()
for i in range(self.n_samples):
write_wav(