author     Jules Laplace <julescarbon@gmail.com>    2020-01-11 02:25:15 +0100
committer  Jules Laplace <julescarbon@gmail.com>    2020-01-11 02:25:15 +0100
commit     e4a32154c0f55f974c8fd210495898cb15027d8c (patch)
tree       1d6336a82315db023d2b9118adf976902d2b4d8a
parent     179f98c949dbeeb383d152e46610dd6789616231 (diff)
dense search with args
-rw-r--r--   cli/app/commands/biggan/search_dense.py   13
-rw-r--r--   cli/app/search/search_dense.py             57
l---------   cli/categories                              1
-rw-r--r--   cli/categories/biggan_all.json (renamed from inversion/categories/biggan_all.json)   0
-rw-r--r--   cli/categories/biggan_all_categories.json (renamed from inversion/categories/biggan_all_categories.json)   0
-rw-r--r--   cli/categories/biggan_all_categories.txt (renamed from inversion/categories/biggan_all_categories.txt)   0
-rw-r--r--   cli/categories/biggan_animate.json (renamed from inversion/categories/biggan_animate.json)   0
-rw-r--r--   cli/categories/biggan_animate.txt (renamed from inversion/categories/biggan_animate.txt)   0
-rw-r--r--   cli/categories/biggan_inanimate.json (renamed from inversion/categories/biggan_inanimate.json)   0
-rw-r--r--   cli/categories/biggan_inanimate.txt (renamed from inversion/categories/biggan_inanimate.txt)   0
10 files changed, 44 insertions, 27 deletions
diff --git a/cli/app/commands/biggan/search_dense.py b/cli/app/commands/biggan/search_dense.py
index 0cf56a3..b743350 100644
--- a/cli/app/commands/biggan/search_dense.py
+++ b/cli/app/commands/biggan/search_dense.py
@@ -1,15 +1,22 @@
import click
-from app.search.params import Params
+from app.search.params import Params, timestamp
from app.search.search_dense import find_dense_embedding_for_images
@click.command('')
@click.option('-i', '--input', 'opt_fp_in', required=True,
help='Path to input image')
+@click.option('-t', '--tag', 'opt_tag', default="inverse_" + timestamp(),
+ help='Tag this build')
+@click.option('-ll', '--feature_layers', 'opt_feature_layers', default="1a,2a,4a,7a",
+ help='Feature layers used for loss')
+@click.option('-s', '--save_progress', 'opt_save_progress', is_flag=True,
+ help='Save example images every 500 frames')
@click.pass_context
-def cli(ctx, opt_fp_in):
+def cli(ctx, opt_fp_in, opt_tag, opt_feature_layers, opt_save_progress):
"""
Search for an image (class vector) in BigGAN using gradient descent
"""
params = Params(opt_fp_in)
- find_dense_embedding_for_images(params)
+ opt_feature_layers = opt_feature_layers.split(',')
+ find_dense_embedding_for_images(params, opt_tag=opt_tag, opt_feature_layers=opt_feature_layers, opt_save_progress=opt_save_progress)
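For reference, a minimal sketch (not part of the commit) of how the new options flow into the search call, mirroring cli() above; the values shown are illustrative:

# Sketch only: the same transformation cli() performs before calling the search.
from app.search.params import timestamp   # same helper the command imports above

opt_tag = "inverse_" + timestamp()             # default for -t/--tag
opt_feature_layers = "1a,2a,4a,7a".split(',')  # -ll string -> ['1a', '2a', '4a', '7a']
opt_save_progress = True                       # set by the -s/--save_progress flag
# find_dense_embedding_for_images(params, opt_tag=opt_tag,
#                                 opt_feature_layers=opt_feature_layers,
#                                 opt_save_progress=opt_save_progress)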
diff --git a/cli/app/search/search_dense.py b/cli/app/search/search_dense.py
index ac67d07..ac66a73 100644
--- a/cli/app/search/search_dense.py
+++ b/cli/app/search/search_dense.py
@@ -42,7 +42,7 @@ feature_layer_names = {
'7c': "InceptionV3/Mixed_7c",
}
-def find_dense_embedding_for_images(params):
+def find_dense_embedding_for_images(params, opt_tag="inverse", opt_feature_layers=["1a", "2a", "3a", "4a", "7a"], opt_save_progress=True):
# --------------------------
# Global directories.
# --------------------------
@@ -227,24 +227,35 @@ def find_dense_embedding_for_images(params):
# # feat_loss += tf.reduce_mean(feat_square_diff) * 0.17
# # img_feat_err += tf.reduce_mean(feat_square_diff, axis=1) * 0.17
+ feat_loss = tf.constant(0.0)
+ img_feat_err = tf.constant(0.0)
+
+ for layer in opt_feature_layers:
+ layer_name = feature_layer_names[layer]
+ gen_feat = gen_feat_ex[layer_name]
+ target_feat = target_feat_ex[layer_name]
+ feat_square_diff = tf.reshape(tf.square(gen_feat - target_feat), [batch_size, -1])
+ feat_loss += tf.reduce_mean(feat_square_diff) / len(opt_feature_layers)
+ img_feat_err += tf.reduce_mean(feat_square_diff, axis=1) / len(opt_feature_layers)
+
# conv1 1, conv1 2, conv3 2 and conv4 2
- gen_feat = gen_feat_ex["InceptionV3/Conv2d_1a_3x3"]
- target_feat = target_feat_ex["InceptionV3/Conv2d_1a_3x3"]
- feat_square_diff = tf.reshape(tf.square(gen_feat - target_feat), [BATCH_SIZE, -1])
- feat_loss = tf.reduce_mean(feat_square_diff) * 0.25
- img_feat_err = tf.reduce_mean(feat_square_diff, axis=1) * 0.25
+ # gen_feat = gen_feat_ex["InceptionV3/Conv2d_1a_3x3"]
+ # target_feat = target_feat_ex["InceptionV3/Conv2d_1a_3x3"]
+ # feat_square_diff = tf.reshape(tf.square(gen_feat - target_feat), [BATCH_SIZE, -1])
+ # feat_loss = tf.reduce_mean(feat_square_diff) * 0.25
+ # img_feat_err = tf.reduce_mean(feat_square_diff, axis=1) * 0.25
- gen_feat = gen_feat_ex["InceptionV3/Conv2d_2a_3x3"]
- target_feat = target_feat_ex["InceptionV3/Conv2d_2a_3x3"]
- feat_square_diff = tf.reshape(tf.square(gen_feat - target_feat), [BATCH_SIZE, -1])
- feat_loss += tf.reduce_mean(feat_square_diff) * 0.25
- img_feat_err += tf.reduce_mean(feat_square_diff, axis=1) * 0.25
+ # gen_feat = gen_feat_ex["InceptionV3/Conv2d_2a_3x3"]
+ # target_feat = target_feat_ex["InceptionV3/Conv2d_2a_3x3"]
+ # feat_square_diff = tf.reshape(tf.square(gen_feat - target_feat), [BATCH_SIZE, -1])
+ # feat_loss += tf.reduce_mean(feat_square_diff) * 0.25
+ # img_feat_err += tf.reduce_mean(feat_square_diff, axis=1) * 0.25
- gen_feat = gen_feat_ex["InceptionV3/Conv2d_3b_1x1"]
- target_feat = target_feat_ex["InceptionV3/Conv2d_3b_1x1"]
- feat_square_diff = tf.reshape(tf.square(gen_feat - target_feat), [BATCH_SIZE, -1])
- feat_loss += tf.reduce_mean(feat_square_diff) * 0.25
- img_feat_err += tf.reduce_mean(feat_square_diff, axis=1) * 0.25
+ # gen_feat = gen_feat_ex["InceptionV3/Conv2d_3b_1x1"]
+ # target_feat = target_feat_ex["InceptionV3/Conv2d_3b_1x1"]
+ # feat_square_diff = tf.reshape(tf.square(gen_feat - target_feat), [BATCH_SIZE, -1])
+ # feat_loss += tf.reduce_mean(feat_square_diff) * 0.25
+ # img_feat_err += tf.reduce_mean(feat_square_diff, axis=1) * 0.25
# gen_feat = gen_feat_ex["InceptionV3/Mixed_6a"]
# target_feat = target_feat_ex["InceptionV3/Mixed_6a"]
@@ -252,11 +263,11 @@ def find_dense_embedding_for_images(params):
# feat_loss += tf.reduce_mean(feat_square_diff) * 0.25
# img_feat_err += tf.reduce_mean(feat_square_diff, axis=1) * 0.25
- gen_feat = gen_feat_ex["InceptionV3/Mixed_7a"]
- target_feat = target_feat_ex["InceptionV3/Mixed_7a"]
- feat_square_diff = tf.reshape(tf.square(gen_feat - target_feat), [BATCH_SIZE, -1])
- feat_loss += tf.reduce_mean(feat_square_diff) * 0.25
- img_feat_err += tf.reduce_mean(feat_square_diff, axis=1) * 0.25
+ # gen_feat = gen_feat_ex["InceptionV3/Mixed_7a"]
+ # target_feat = target_feat_ex["InceptionV3/Mixed_7a"]
+ # feat_square_diff = tf.reshape(tf.square(gen_feat - target_feat), [BATCH_SIZE, -1])
+ # feat_loss += tf.reduce_mean(feat_square_diff) * 0.25
+ # img_feat_err += tf.reduce_mean(feat_square_diff, axis=1) * 0.25
else:
feat_loss = tf.constant(0.0)
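The loop added above replaces the four hard-coded 0.25-weighted layer terms with an even average over whichever layers are selected via --feature_layers. A standalone sketch of that pattern with random placeholder tensors (shapes are illustrative; the repo builds these ops in a session-based graph, this sketch just runs them eagerly):

import tensorflow as tf

batch_size = 4
layers = ["1a", "2a", "4a", "7a"]
# Stand-ins for gen_feat_ex / target_feat_ex keyed via feature_layer_names[layer].
gen_feat_ex = {l: tf.random.normal([batch_size, 8, 8, 32]) for l in layers}
target_feat_ex = {l: tf.random.normal([batch_size, 8, 8, 32]) for l in layers}

feat_loss = tf.constant(0.0)           # scalar loss, averaged evenly over layers
img_feat_err = tf.zeros([batch_size])  # per-image error, same averaging
for l in layers:
    sq = tf.reshape(tf.square(gen_feat_ex[l] - target_feat_ex[l]), [batch_size, -1])
    feat_loss += tf.reduce_mean(sq) / len(layers)
    img_feat_err += tf.reduce_mean(sq, axis=1) / len(layers)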
@@ -393,7 +404,7 @@ def find_dense_embedding_for_images(params):
sys.stdout.flush()
# Save target images and reconstructions.
- if params.save_progress:
+ if opt_save_progress:
assert SAMPLE_SIZE <= BATCH_SIZE
gen_time = time.time()
gen_images = sess.run(gen_img)
@@ -423,7 +434,7 @@ def find_dense_embedding_for_images(params):
image = Image.fromarray(images[i])
fp = BytesIO()
image.save(fp, format='png')
- data = upload_bytes_to_cortex(params.folder_id, sample_fn + "-inverse.png", fp, "image/png")
+ data = upload_bytes_to_cortex(params.folder_id, "{}-{}.png".format(sample_fn, opt_tag), fp, "image/png")
print(json.dumps(data, indent=2))
if data is not None and 'files' in data:
file_id = data['files'][0]['id']
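With the filename change above, each uploaded reconstruction now carries the run's tag rather than the fixed "-inverse" suffix. A quick sketch with placeholder values (sample_fn and the timestamp format are illustrative, not taken from the repo):

# Placeholder values only.
sample_fn = "sample_0001"
opt_tag = "inverse_2020-01-11"                 # default tag is "inverse_" + timestamp()
print(sample_fn + "-inverse.png")              # old name: sample_0001-inverse.png
print("{}-{}.png".format(sample_fn, opt_tag))  # new name: sample_0001-inverse_2020-01-11.png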
diff --git a/cli/categories b/cli/categories
deleted file mode 120000
index 5668d42..0000000
--- a/cli/categories
+++ /dev/null
@@ -1 +0,0 @@
-../inversion/categories
\ No newline at end of file
diff --git a/inversion/categories/biggan_all.json b/cli/categories/biggan_all.json
index 9981a91..9981a91 100644
--- a/inversion/categories/biggan_all.json
+++ b/cli/categories/biggan_all.json
diff --git a/inversion/categories/biggan_all_categories.json b/cli/categories/biggan_all_categories.json
index 5212e90..5212e90 100644
--- a/inversion/categories/biggan_all_categories.json
+++ b/cli/categories/biggan_all_categories.json
diff --git a/inversion/categories/biggan_all_categories.txt b/cli/categories/biggan_all_categories.txt
index a0e37eb..a0e37eb 100644
--- a/inversion/categories/biggan_all_categories.txt
+++ b/cli/categories/biggan_all_categories.txt
diff --git a/inversion/categories/biggan_animate.json b/cli/categories/biggan_animate.json
index 35a35b0..35a35b0 100644
--- a/inversion/categories/biggan_animate.json
+++ b/cli/categories/biggan_animate.json
diff --git a/inversion/categories/biggan_animate.txt b/cli/categories/biggan_animate.txt
index 69386c9..69386c9 100644
--- a/inversion/categories/biggan_animate.txt
+++ b/cli/categories/biggan_animate.txt
diff --git a/inversion/categories/biggan_inanimate.json b/cli/categories/biggan_inanimate.json
index 6a7498c..6a7498c 100644
--- a/inversion/categories/biggan_inanimate.json
+++ b/cli/categories/biggan_inanimate.json
diff --git a/inversion/categories/biggan_inanimate.txt b/cli/categories/biggan_inanimate.txt
index 05540d9..05540d9 100644
--- a/inversion/categories/biggan_inanimate.txt
+++ b/cli/categories/biggan_inanimate.txt