diff options
| author | Jules Laplace <julescarbon@gmail.com> | 2020-02-23 15:37:15 +0100 |
|---|---|---|
| committer | Jules Laplace <julescarbon@gmail.com> | 2020-02-23 15:37:15 +0100 |
| commit | 619b10d0c954a9b175e377f2522604a32ab6dee2 (patch) | |
| tree | 8c1a48498f0fa7b88049f47d79c75b32f7709286 /cli/app/search/live.py | |
| parent | 44d95c081c9ecbfdfb4a645b40bc1d47cff129b7 (diff) | |
getting rid of the labels
Diffstat (limited to 'cli/app/search/live.py')
| -rw-r--r-- | cli/app/search/live.py | 21 |
1 file changed, 11 insertions, 10 deletions
diff --git a/cli/app/search/live.py b/cli/app/search/live.py index 22441e2..57d5df4 100644 --- a/cli/app/search/live.py +++ b/cli/app/search/live.py @@ -267,16 +267,17 @@ class Interpolator: # Label lerp_label = LerpParam('label', shape=[BATCH_SIZE, N_CLASS], datatype="label") - # Latent - stored vector + # Latent - stored vector - now that we have deep labels, we don't need this # label_stored = InterpolatorParam(name='label_stored', shape=[BATCH_SIZE, N_CLASS], datatype="label") - label_stored = LerpParam(name='label_stored', shape=[BATCH_SIZE, N_CLASS], datatype="label") - label_stored_mix = LerpParam('label_stored_mix', a_in=lerp_label.output, b_in=label_stored.output, shape=[BATCH_SIZE, Z_DIM], datatype="input") + # label_stored = LerpParam(name='label_stored', shape=[BATCH_SIZE, N_CLASS], datatype="label") + # label_stored_mix = LerpParam('label_stored_mix', a_in=lerp_label.output, b_in=label_stored.output, shape=[BATCH_SIZE, Z_DIM], datatype="input") + # gen_in['y'] = label_stored_mix.output # Generator gen_in = {} gen_in['truncation'] = 1.0 # self.opts['truncation'].variable gen_in['z'] = latent_stored_mix.output - gen_in['y'] = label_stored_mix.output + gen_in['y'] = lerp_label.output self.gen_img = generator(gen_in, signature=gen_signature) # Encoding - first hidden layer @@ -301,8 +302,8 @@ class Interpolator: # Make all the stored lerps use the same interpolation amount. 
tf.contrib.graph_editor.reroute_ts(encoding_stored.n.variable, latent_stored.n.variable) - tf.contrib.graph_editor.reroute_ts(encoding_stored.n.variable, label_stored.n.variable) - # tf.contrib.graph_editor.reroute_ts(encoding_stored_mix.n.variable, latent_stored_mix.n.variable) + # tf.contrib.graph_editor.reroute_ts(encoding_stored.n.variable, label_stored.n.variable) + tf.contrib.graph_editor.reroute_ts(encoding_stored_mix.n.variable, latent_stored_mix.n.variable) # tf.contrib.graph_editor.reroute_ts(encoding_stored_mix.n.variable, label_stored_mix.n.variable) layer_op_names = [ @@ -401,7 +402,7 @@ class Interpolator: new_encoding = np.expand_dims(data['encoding'], axis=0) latent_stored = self.lerp_params['latent_stored'] - label_stored = self.lerp_params['label_stored'] + # label_stored = self.lerp_params['label_stored'] encoding_stored = self.lerp_params['encoding_stored'] encoding_stored_mix = self.lerp_params['encoding_stored_mix'] @@ -415,7 +416,7 @@ class Interpolator: # if we're showing an encoding already, lerp to the next one if encoding_stored_mix.n.value > 0: encoding_stored.switch(target_value=new_encoding) - label_stored.switch(target_value=new_label) + # label_stored.switch(target_value=new_label) latent_stored.switch(target_value=new_latent) for layer, value in zip(layer_labels_stored, new_layer_labels): layer.switch(target_value=value) @@ -426,14 +427,14 @@ class Interpolator: encoding_stored.n.assign(0) encoding_stored.a.assign(new_encoding) latent_stored.a.assign(new_latent) - label_stored.a.assign(new_label) + # label_stored.a.assign(new_label) for layer, value in zip(layer_labels_stored, new_layer_labels): layer.a.assign(value) else: encoding_stored.n.assign(1) encoding_stored.b.assign(new_encoding) latent_stored.b.assign(new_latent) - label_stored.b.assign(new_label) + # label_stored.b.assign(new_label) for layer, value in zip(layer_labels_stored, new_layer_labels): layer.b.assign(value) encoding_stored_mix.switch() |
