| author | Jules Laplace <julescarbon@gmail.com> | 2020-01-17 01:34:16 +0100 |
|---|---|---|
| committer | Jules Laplace <julescarbon@gmail.com> | 2020-01-17 01:34:16 +0100 |
| commit | efef53035a578d3aa80ff01e0928669eb3828233 (patch) | |
| tree | 7be42991a775185402eb9e4896d629153d97077e /cli/app | |
| parent | 895938a610b629df666b7f1a68f2e80407278550 (diff) | |
just adding sine
Diffstat (limited to 'cli/app')
| -rw-r--r-- | cli/app/search/live.py | 74 |
1 files changed, 38 insertions, 36 deletions
```diff
diff --git a/cli/app/search/live.py b/cli/app/search/live.py
index e626e25..31358e1 100644
--- a/cli/app/search/live.py
+++ b/cli/app/search/live.py
@@ -223,16 +223,17 @@ class Interpolator:
         disentangled = z_mix.output + zoom + shiftx + shifty + luminance
 
         # Latent - stored vector
-        # latent_stored = LerpParam(name='latent_stored', shape=[BATCH_SIZE, Z_DIM], datatype="noise")
-        latent_stored = InterpolatorParam(name='latent_stored', shape=[BATCH_SIZE, Z_DIM], datatype="noise")
-        latent_stored_mix = LerpParam('latent_stored_mix', a_in=disentangled, b_in=latent_stored.variable, shape=[BATCH_SIZE, Z_DIM], datatype="input")
+        latent_stored = LerpParam(name='latent_stored', shape=[BATCH_SIZE, Z_DIM], datatype="noise")
+        # latent_stored = InterpolatorParam(name='latent_stored', shape=[BATCH_SIZE, Z_DIM], datatype="noise")
+        latent_stored_mix = LerpParam('latent_stored_mix', a_in=disentangled, b_in=latent_stored.output, shape=[BATCH_SIZE, Z_DIM], datatype="input")
 
         # Label
         lerp_label = LerpParam('label', shape=[BATCH_SIZE, N_CLASS], datatype="label")
 
         # Latent - stored vector
-        label_stored = InterpolatorParam(name='label_stored', shape=[BATCH_SIZE, N_CLASS], datatype="label")
-        label_stored_mix = LerpParam('label_stored_mix', a_in=lerp_label.output, b_in=label_stored.variable, shape=[BATCH_SIZE, Z_DIM], datatype="input")
+        # label_stored = InterpolatorParam(name='label_stored', shape=[BATCH_SIZE, N_CLASS], datatype="label")
+        label_stored = LerpParam(name='label_stored', shape=[BATCH_SIZE, N_CLASS], datatype="label")
+        label_stored_mix = LerpParam('label_stored_mix', a_in=lerp_label.output, b_in=label_stored.output, shape=[BATCH_SIZE, Z_DIM], datatype="input")
 
         # Generator
         gen_in = {}
@@ -249,9 +250,10 @@ class Interpolator:
         encoding_shape_np = tuple([1,] + encoding_shape[1:])
         encoding_latent_placeholder = tf.constant(np.zeros(encoding_shape_np, dtype=np.float32))
 
-        encoding_stored = InterpolatorParam('encoding_stored', shape=encoding_shape_np, datatype="noise")
+        # encoding_stored = InterpolatorParam('encoding_stored', shape=encoding_shape_np, datatype="noise")
+        encoding_stored = LerpParam('encoding_stored', shape=encoding_shape_np, datatype="noise")
         encoding_stored_sin = SinParam('encoding_orbit', shape=encoding_shape_np, datatype="noise")
-        encoding_stored_sum = encoding_stored.variable + encoding_stored_sin.output
+        encoding_stored_sum = encoding_stored.output + encoding_stored_sin.output
         encoding_stored_mix = LerpParam('encoding_stored_mix', a_in=encoding_latent_placeholder, b_in=encoding_stored_sum, shape=encoding_shape_np, datatype="encoding")
 
         # Use the placeholder to redirect parts of the graph.
@@ -262,8 +264,8 @@ class Interpolator:
         tf.contrib.graph_editor.swap_ts(encoding_stored_mix.output, encoding_latent_placeholder)
 
         # Make all the stored lerps use the same interpolation amount.
-        # tf.contrib.graph_editor.reroute_ts(encoding_stored.n.variable, latent_stored.n.variable)
-        # tf.contrib.graph_editor.reroute_ts(encoding_stored.n.variable, label_stored.n.variable)
+        tf.contrib.graph_editor.reroute_ts(encoding_stored.n.variable, latent_stored.n.variable)
+        tf.contrib.graph_editor.reroute_ts(encoding_stored.n.variable, label_stored.n.variable)
         tf.contrib.graph_editor.reroute_ts(encoding_stored_mix.n.variable, latent_stored_mix.n.variable)
         tf.contrib.graph_editor.reroute_ts(encoding_stored_mix.n.variable, label_stored_mix.n.variable)
 
@@ -327,34 +329,34 @@ class Interpolator:
             new_label = np.expand_dims(data['label'], axis=0)
             new_encoding = np.expand_dims(data['encoding'], axis=0)
 
-            self.opts['latent_stored'].assign(new_latent)
-            self.opts['label_stored'].assign(new_label)
-            self.opts['encoding_stored'].assign(new_encoding)
-            encoding_stored_mix.switch()
-            # latent_stored = self.lerp_params['latent_stored']
-            # label_stored = self.lerp_params['label_stored']
-            # encoding_stored = self.lerp_params['encoding_stored']
-            # encoding_stored_mix = self.lerp_params['encoding_stored_mix']
+            # self.opts['latent_stored'].assign(new_latent)
+            # self.opts['label_stored'].assign(new_label)
+            # self.opts['encoding_stored'].assign(new_encoding)
+            # encoding_stored_mix.switch()
+            latent_stored = self.lerp_params['latent_stored']
+            label_stored = self.lerp_params['label_stored']
+            encoding_stored = self.lerp_params['encoding_stored']
+            encoding_stored_mix = self.lerp_params['encoding_stored_mix']
 
-            # # if we're showing an encoding already, lerp to the next one
-            # if encoding_stored_mix.n.value > 0:
-            #     encoding_stored.switch(target_value=new_encoding)
-            #     label_stored.switch(target_value=new_label)
-            #     latent_stored.switch(target_value=new_latent)
-            # # otherwise (we're showing the latent)...
-            # else:
-            #     # jump to the stored encoding, then switch
-            #     if encoding_stored.n.value < 0.5:
-            #         encoding_stored.n.assign(0)
-            #         encoding_stored.a.assign(new_encoding)
-            #         latent_stored.a.assign(new_latent)
-            #         label_stored.a.assign(new_label)
-            #     else:
-            #         encoding_stored.n.assign(1)
-            #         encoding_stored.b.assign(new_encoding)
-            #         latent_stored.b.assign(new_latent)
-            #         label_stored.b.assign(new_label)
-            #     encoding_stored_mix.switch()
+            # if we're showing an encoding already, lerp to the next one
+            if encoding_stored_mix.n.value > 0:
+                encoding_stored.switch(target_value=new_encoding)
+                label_stored.switch(target_value=new_label)
+                latent_stored.switch(target_value=new_latent)
+            # otherwise (we're showing the latent)...
+            else:
+                # jump to the stored encoding, then switch
+                if encoding_stored.n.value < 0.5:
+                    encoding_stored.n.assign(0)
+                    encoding_stored.a.assign(new_encoding)
+                    latent_stored.a.assign(new_latent)
+                    label_stored.a.assign(new_label)
+                else:
+                    encoding_stored.n.assign(1)
+                    encoding_stored.b.assign(new_encoding)
+                    latent_stored.b.assign(new_latent)
+                    label_stored.b.assign(new_label)
+                encoding_stored_mix.switch()
 
     def on_step(self, i, sess):
         for param in self.sin_params.values():
```
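The newly uncommented `reroute_ts` calls point the consumers of the latent and label lerps' interpolation amounts at the encoding lerp's, so all of the stored lerps crossfade together. `tf.contrib.graph_editor.reroute_ts` is a real TensorFlow 1.x API; the toy graph below is only a sketch of that rerouting idea, with made-up tensor names (`n_a`, `n_b`, `lerp_a`, `lerp_b`) rather than anything from this repository.

```python
import tensorflow as tf                      # TensorFlow 1.x
from tensorflow.contrib import graph_editor as ge

# Two independent interpolation amounts, each driving its own lerp.
n_a = tf.placeholder(tf.float32, shape=[], name='n_a')
n_b = tf.placeholder(tf.float32, shape=[], name='n_b')
a, b = tf.constant(0.0), tf.constant(10.0)
lerp_a = a + n_a * (b - a)                   # consumes n_a
lerp_b = a + n_b * (b - a)                   # consumes n_b

# Reroute: every op that consumed n_b now reads n_a instead,
# so both lerps follow a single interpolation amount.
ge.reroute_ts([n_a], [n_b])

with tf.Session() as sess:
    print(sess.run([lerp_a, lerp_b], feed_dict={n_a: 0.25}))  # [2.5, 2.5]
```

`swap_ts`, used a few lines earlier in the diff, trades the consumers of two tensors instead of leaving one side dangling, which is how the commit splices `encoding_stored_mix.output` in place of the placeholder.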
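The large re-enabled block in the last hunk is a double-buffered crossfade: the incoming vector is loaded into whichever endpoint the lerp is not currently showing, and the blend amount `n` is then eased toward it. The `LerpParam` / `InterpolatorParam` classes themselves are not part of this diff, so the following is only a minimal NumPy sketch of that pattern under assumed semantics; the `Lerp` class, its `switch`/`step` methods, and the `(1, 512)` shape are hypothetical stand-ins, not the project's API.

```python
import numpy as np

class Lerp:
    """Hypothetical stand-in for a LerpParam-style crossfader: two endpoints
    `a` and `b` and a blend amount `n` in [0, 1]."""
    def __init__(self, shape):
        self.a = np.zeros(shape, dtype=np.float32)
        self.b = np.zeros(shape, dtype=np.float32)
        self.n = 0.0          # 0.0 shows `a`, 1.0 shows `b`
        self.target_n = 0.0

    def output(self):
        return (1.0 - self.n) * self.a + self.n * self.b

    def switch(self, target_value=None):
        # Load the new target into the endpoint we are NOT showing,
        # then start easing `n` toward that endpoint.
        if self.n < 0.5:
            if target_value is not None:
                self.b = target_value
            self.target_n = 1.0
        else:
            if target_value is not None:
                self.a = target_value
            self.target_n = 0.0

    def step(self, rate=0.05):
        # Ease the blend amount toward its target each frame.
        self.n += float(np.clip(self.target_n - self.n, -rate, rate))

# Usage: crossfade from the current endpoint to a newly stored vector.
stored = Lerp(shape=(1, 512))
stored.switch(target_value=np.random.randn(1, 512).astype(np.float32))
for _ in range(20):
    stored.step()
frame = stored.output()
```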
