author     Jules Laplace <julescarbon@gmail.com>   2020-02-22 17:37:24 +0100
committer  Jules Laplace <julescarbon@gmail.com>   2020-02-22 17:37:24 +0100
commit     0536d6034757083b7e59011b8105ef80e123e6fd (patch)
tree       5af8db7fac649e5af63d25d2ef24c8a103c4757a
parent     ecc9df4237e09828cf387bf8dd1fda8617c4898d (diff)
incorporate layers into live script
-rw-r--r--   cli/app/search/live.py   10
1 file changed, 5 insertions, 5 deletions
diff --git a/cli/app/search/live.py b/cli/app/search/live.py
index b5d547a..eb89202 100644
--- a/cli/app/search/live.py
+++ b/cli/app/search/live.py
@@ -316,9 +316,9 @@ class Interpolator:
         ]
         op_input_index = 1
-        split_shape = [1, 128]
+        # split_shape = [1, 128]
         for op_name, layer_id in zip(layer_op_names, range(7)):
-            tensor_name = 'module_apply_{}/Generator_2/split:{}'.format(gen_signature, layer_id + 1)
+            tensor_name = 'module_apply_{}/linear_1/MatMul:0'.format(gen_signature)
             layer_tensor = tf.get_default_graph().get_tensor_by_name(tensor_name)
             op_name = 'module_apply_{}/{}'.format(gen_signature, op_name)
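The new tensor name points each layer hook at the output of the module's first fully connected layer rather than the per-layer split outputs. The lookup itself is plain TF 1.x graph surgery; a minimal sketch, assuming the hub module has already been applied to the default graph and with gen_signature as an illustrative value:

    import tensorflow as tf  # TF 1.x graph-mode API

    gen_signature = 'default'  # hypothetical; the real value comes from the loaded hub module

    # Tensor names follow '<op_name>:<output_index>'; ':0' selects the op's first output.
    tensor_name = 'module_apply_{}/linear_1/MatMul:0'.format(gen_signature)
    layer_tensor = tf.get_default_graph().get_tensor_by_name(tensor_name)
    print(layer_tensor.shape, layer_tensor.dtype)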
@@ -326,8 +326,8 @@ class Interpolator:
             ## unlike the encoding, here we update the layer op directly, so we don't need a temporary constant to swap
             # raw_layer_placeholder = tf.constant(np.zeros(layer_tensor.shape, dtype=np.float32))
-            layer_stored = LerpParam('layer_stored_{}'.format(layer_id), shape=split_shape, datatype="noise")
-            layer_stored_mix = LerpParam('layer_stored_mix_{}'.format(layer_id), a_in=layer_tensor, b_in=layer_stored.output, shape=split_shape, datatype="noise")
+            layer_stored = LerpParam('layer_stored_{}'.format(layer_id), shape=layer_tensor.shape, datatype="noise")
+            layer_stored_mix = LerpParam('layer_stored_mix_{}'.format(layer_id), a_in=layer_tensor, b_in=layer_stored.output, shape=layer_tensor.shape, datatype="noise")
             layer_op._update_input(op_input_index, layer_stored_mix.output)
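LerpParam is a project-specific helper, but the rewiring underneath it is a generic TF 1.x graph-editing move: take an existing op and redirect one of its inputs to a new tensor through the private Operation._update_input method. A hedged sketch of just that mechanism, with illustrative op and tensor names:

    import tensorflow as tf  # TF 1.x graph-mode API

    graph = tf.get_default_graph()
    layer_op = graph.get_operation_by_name('module_apply_default/some_layer/add')  # illustrative
    op_input_index = 1

    # Build a replacement with the same shape/dtype as the input being swapped out,
    # then splice it into the op's input edge in place. _update_input is private
    # TF 1.x API, so this depends on framework internals.
    original_input = layer_op.inputs[op_input_index]
    replacement = tf.placeholder(original_input.dtype, shape=original_input.shape)
    layer_op._update_input(op_input_index, replacement)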
@@ -404,7 +404,7 @@ class Interpolator:
         layer_labels_stored = []
         new_layer_labels = []
         for i in range(layer_label_count):
-            layer_labels_stored[i] = self.lerp_params['layer_stored_mix_{}'.format(i)]
+            layer_labels_stored[i] = self.lerp_params['layer_stored_{}'.format(i)]
             new_layer_labels[i] = np.expand_dims(data['layer_labels'][i], axis=0)
         # if we're showing an encoding already, lerp to the next one
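The last hunk switches the stored handles from the mix params to the raw layer_stored_{} params before lerping. Since indexed assignment into freshly emptied lists only works if they are resized elsewhere, here is a small hypothetical helper that gathers the same pairs with append, assuming lerp_params, data and layer_label_count as in the surrounding code:

    import numpy as np

    def gather_layer_labels(lerp_params, data, layer_label_count):
        # Hypothetical helper mirroring the loop above: pair each stored layer
        # LerpParam with its incoming label, expanded to a batch of one.
        layer_labels_stored = []
        new_layer_labels = []
        for i in range(layer_label_count):
            layer_labels_stored.append(lerp_params['layer_stored_{}'.format(i)])
            new_layer_labels.append(np.expand_dims(data['layer_labels'][i], axis=0))
        return layer_labels_stored, new_layer_labels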