diff options
Diffstat (limited to 'cli/app')
 -rw-r--r--  cli/app/commands/biggan/extract_dense_vectors.py |  6
 -rw-r--r--  cli/app/search/json.py                           |  4
 -rw-r--r--  cli/app/search/search_dense.py                   |  8
 -rw-r--r--  cli/app/utils/cortex_utils.py                    |  2
4 files changed, 11 insertions, 9 deletions
diff --git a/cli/app/commands/biggan/extract_dense_vectors.py b/cli/app/commands/biggan/extract_dense_vectors.py
index 4e7952a..b4300b5 100644
--- a/cli/app/commands/biggan/extract_dense_vectors.py
+++ b/cli/app/commands/biggan/extract_dense_vectors.py
@@ -9,9 +9,9 @@ from app.search.json import params_dense_dict
 @click.command('')
 @click.option('-f', '--folder_id', 'opt_folder_id', type=int, help='Folder ID to process')
-@click.option('-ls', '--latent_steps', 'opt_latent_steps', default=2000, type=int,
+@click.option('-ls', '--latent_steps', 'opt_latent_steps', default=1000, type=int,
   help='Number of optimization iterations')
-@click.option('-ds', '--dense_steps', 'opt_dense_steps', default=2000, type=int,
+@click.option('-ds', '--dense_steps', 'opt_dense_steps', default=1000, type=int,
   help='Number of optimization iterations')
 @click.option('-v', '--video', 'opt_video', is_flag=True,
   help='Export a video for each dataset')
@@ -55,7 +55,7 @@ def cli(ctx, opt_folder_id, opt_latent_steps, opt_dense_steps, opt_video, opt_re
     find_nearest_vector_for_images(
       paths=paths,
       opt_dims=512,
-      opt_steps=opt_dense_steps,
+      opt_steps=opt_latent_steps,
       opt_video=opt_video,
       opt_tag=tag,
       opt_limit=-1,
diff --git a/cli/app/search/json.py b/cli/app/search/json.py
index ea70fd6..c50d716 100644
--- a/cli/app/search/json.py
+++ b/cli/app/search/json.py
@@ -16,7 +16,7 @@ def make_params_latent(tag):
     "out_dataset": os.path.join(app_cfg.DIR_INVERSES, tag, "dataset.latent.hdf5"),
     "inv_layer": "latent",
     "decay_lr": True,
-    "inv_it": 15000,
+    "inv_it": 5000,
     "generator_path": "https://tfhub.dev/deepmind/biggan-512/2",
     "attention_map_layer": "Generator_2/attention/Softmax:0",
     "pre_trained_latent": False,
@@ -68,7 +68,7 @@ def make_params_dense(tag, folder_id):
     "dataset": os.path.join(app_cfg.DIR_INVERSES, tag, "dataset.latent.hdf5"),
     "inv_layer": "Generator_2/G_Z/Reshape:0",
     "decay_lr": False,
-    "inv_it": 15000,
+    "inv_it": 5000,
     "generator_path": "https://tfhub.dev/deepmind/biggan-512/2",
     "attention_map_layer": "Generator_2/attention/Softmax:0",
     "pre_trained_latent": True,
diff --git a/cli/app/search/search_dense.py b/cli/app/search/search_dense.py
index 6fba70d..066d946 100644
--- a/cli/app/search/search_dense.py
+++ b/cli/app/search/search_dense.py
@@ -327,20 +327,20 @@ def find_dense_embedding_for_images(params, opt_tag="inverse_" + timestamp(), op
   sample_labels = in_file['ytrain'][()]
   sample_fns = in_file['fn'][()]
   NUM_IMGS = sample_images.shape[0] # number of images to be inverted.
+  INFILL_IMGS = NUM_IMGS
   print("Number of images: {}".format(NUM_IMGS))
   print("Batch size: {}".format(BATCH_SIZE))
   def sample_images_gen():
-    for i in range(int(NUM_IMGS / BATCH_SIZE)):
+    for i in range(int(INFILL_IMGS / BATCH_SIZE)):
       i_1, i_2 = i*BATCH_SIZE, (i+1)*BATCH_SIZE
       yield sample_images[i_1:i_2], sample_labels[i_1:i_2]
   image_gen = sample_images_gen()
   sample_latents = in_file['latent']
   def sample_latent_gen():
-    for i in range(int(NUM_IMGS / BATCH_SIZE)):
+    for i in range(int(INFILL_IMGS / BATCH_SIZE)):
       i_1, i_2 = i*BATCH_SIZE, (i+1)*BATCH_SIZE
       yield sample_latents[i_1:i_2]
   latent_gen = sample_latent_gen()
-  INFILL_IMGS = NUM_IMGS
   while INFILL_IMGS % BATCH_SIZE != 0:
     REMAINDER = 1 # BATCH_SIZE - (NUM_IMGS % BATCH_SIZE)
     INFILL_IMGS += REMAINDER
@@ -444,7 +444,9 @@ def find_dense_embedding_for_images(params, opt_tag="inverse_" + timestamp(), op
     for i in range(BATCH_SIZE):
       out_i = out_pos + i
       if out_i >= NUM_IMGS:
+        print("{} >= {}, skipping...".format(out_i, NUM_IMGS))
         continue
+      print("{}: {}".format(out_i, sample_fn))
       sample_fn, ext = os.path.splitext(sample_fns[out_i])
       image = Image.fromarray(images[i])
       fp = BytesIO()
diff --git a/cli/app/utils/cortex_utils.py b/cli/app/utils/cortex_utils.py
index 1413f9a..26878fd 100644
--- a/cli/app/utils/cortex_utils.py
+++ b/cli/app/utils/cortex_utils.py
@@ -51,7 +51,7 @@ def find_unprocessed_files(files, reprocess=False):
     dataset = fn.split('-')[0]
     if dataset not in datasets or reprocess == True:
       unprocessed_files.append(file)
-  return unprocessed_files
+  return sorted(unprocessed_files, key=lambda x: x['name'])

 def fetch_json(url, **kwargs):
   """HTTP GET some JSON"""
