author    Jules Laplace <julescarbon@gmail.com>  2020-02-22 17:08:39 +0100
committer Jules Laplace <julescarbon@gmail.com>  2020-02-22 17:08:39 +0100
commit    2dcd84d8a567d1cf0164e5cabe8434e6b0b99fd9 (patch)
tree      4840f85647307b626fece971d03ff100ad0bcd29 /cli/app/search
parent    4f146dba5bfdabf443ffacd01af86c41c12110ec (diff)
incorporate layers into live script
Diffstat (limited to 'cli/app/search')
-rw-r--r--  cli/app/search/live.py | 42
1 file changed, 41 insertions, 1 deletion
diff --git a/cli/app/search/live.py b/cli/app/search/live.py
index 05dc4b1..5161a27 100644
--- a/cli/app/search/live.py
+++ b/cli/app/search/live.py
@@ -280,7 +280,7 @@ class Interpolator:
self.gen_img = generator(gen_in, signature=gen_signature)
# Encoding - first hidden layer
- gen_layer_name = 'module_apply_' + gen_signature + '/' + params.inv_layer
+ gen_layer_name = 'module_apply_{}/{}'.format(gen_signature, params.inv_layer)
encoding_latent = tf.get_default_graph().get_tensor_by_name(gen_layer_name)
encoding_shape = encoding_latent.get_shape().as_list()
@@ -305,6 +305,33 @@ class Interpolator:
tf.contrib.graph_editor.reroute_ts(encoding_stored_mix.n.variable, latent_stored_mix.n.variable)
tf.contrib.graph_editor.reroute_ts(encoding_stored_mix.n.variable, label_stored_mix.n.variable)
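+ # The generator's per-layer concat ops; each consumes one slice of the split latent, which we intercept below.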
+ layer_op_names = [
+     "Generator_2/concat",
+     "Generator_2/concat_1",
+     "Generator_2/concat_2",
+     "Generator_2/concat_3",
+     "Generator_2/concat_4",
+     "Generator_2/concat_5",
+     "Generator_2/concat_6",
+ ]
+
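+ # A ':N' suffix in a tensor name selects the N-th output of an op, so split:1 .. split:7 address the per-layer slices of the latent split.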
+ op_input_index = 1
+ for layer_id, op_name in enumerate(layer_op_names):
+     op_name = 'module_apply_{}/{}'.format(gen_signature, op_name)
+     tensor_name = 'module_apply_{}/Generator_2/split:{}'.format(gen_signature, layer_id + 1)
+     layer_tensor = tf.get_default_graph().get_tensor_by_name(tensor_name)
+     layer_op = tf.get_default_graph().get_operation_by_name(op_name)
+
+     ## unlike the encoding, here we update the layer op directly, so we don't need a temporary constant to swap
+     # raw_layer_placeholder = tf.constant(np.zeros(layer_tensor.shape, dtype=np.float32))
+     layer_stored = LerpParam('layer_stored_{}'.format(layer_id), shape=layer_tensor.shape, datatype="noise")
+     layer_stored_mix = LerpParam('layer_stored_mix_{}'.format(layer_id), a_in=layer_tensor, b_in=layer_stored.output, shape=layer_tensor.shape, datatype="noise")
+
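+     # Operation._update_input is a private TF API: it rewires the op's input at the given index in place.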
+     layer_op._update_input(op_input_index, layer_stored_mix.output)
+
+     # Make all the stored lerps use the same interpolation amount.
+     tf.contrib.graph_editor.reroute_ts(encoding_stored_mix.n.variable, layer_stored_mix.n.variable)
+
sys.stderr.write("Sin params: {}\n".format(", ".join(self.sin_params.keys())))
sys.stderr.write("Lerp params: {}\n".format(", ".join(self.lerp_params.keys())))
sys.stderr.write("Smooth params: {}\n".format(", ".join(self.smooth_params.keys())))
@@ -371,11 +398,20 @@ class Interpolator:
encoding_stored = self.lerp_params['encoding_stored']
encoding_stored_mix = self.lerp_params['encoding_stored_mix']
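+ # collect the per-layer stored lerps built at graph-construction time, and give each incoming layer label a batch axis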
+ layer_label_count = len(data['layer_labels'])
+ layer_labels_stored = []
+ new_layer_labels = []
+ for i in range(layer_label_count):
+     layer_labels_stored.append(self.lerp_params['layer_stored_{}'.format(i)])
+     new_layer_labels.append(np.expand_dims(data['layer_labels'][i], axis=0))
+
# if we're showing an encoding already, lerp to the next one
if encoding_stored_mix.n.value > 0:
encoding_stored.switch(target_value=new_encoding)
label_stored.switch(target_value=new_label)
latent_stored.switch(target_value=new_latent)
+ for layer, value in zip(layer_labels_stored, new_layer_labels):
+     layer.switch(target_value=value)
# otherwise (we're showing the latent)...
else:
# jump to the stored encoding, then switch
@@ -384,11 +420,15 @@ class Interpolator:
encoding_stored.a.assign(new_encoding)
latent_stored.a.assign(new_latent)
label_stored.a.assign(new_label)
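+ # mirror the encoding/latent/label assignment into each per-layer lerp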
+ for layer, value in zip(layer_labels_stored, new_layer_labels):
+     layer.a.assign(value)
else:
encoding_stored.n.assign(1)
encoding_stored.b.assign(new_encoding)
latent_stored.b.assign(new_latent)
label_stored.b.assign(new_label)
+ for layer, value in zip(layer_labels_stored, new_layer_labels):
+     layer.b.assign(value)
encoding_stored_mix.switch()
def on_step(self, i, sess):