From b39b1d51db2d485e9c60fb4d3f5445474cef8700 Mon Sep 17 00:00:00 2001
From: Jules Laplace
Date: Fri, 14 Dec 2018 16:39:47 +0100
Subject: mysql import functions

---
 megapixels/commands/faiss/build.py         | 62 ------------------------------
 megapixels/commands/faiss/build_faiss.py   | 58 ++++++++++++++++++++++++++++
 megapixels/commands/faiss/sync.py          | 18 ---------
 megapixels/commands/faiss/sync_metadata.py | 18 +++++++++
 4 files changed, 76 insertions(+), 80 deletions(-)
 delete mode 100644 megapixels/commands/faiss/build.py
 create mode 100644 megapixels/commands/faiss/build_faiss.py
 delete mode 100644 megapixels/commands/faiss/sync.py
 create mode 100644 megapixels/commands/faiss/sync_metadata.py

(limited to 'megapixels/commands')

diff --git a/megapixels/commands/faiss/build.py b/megapixels/commands/faiss/build.py
deleted file mode 100644
index e525542a..00000000
--- a/megapixels/commands/faiss/build.py
+++ /dev/null
@@ -1,62 +0,0 @@
-"""
-Index all of the FAISS datasets
-"""
-
-import os
-import glob
-import click
-import faiss
-import time
-import numpy as np
-
-from app.utils.file_utils import load_recipe, load_csv_safe
-from app.settings import app_cfg as cfg
-
-engine = create_engine('sqlite:///:memory:')
-
-class DefaultRecipe:
-    def __init__(self):
-        self.dim = 128
-        self.factory_type = 'Flat'
-
-@click.command()
-@click.pass_context
-def cli(ctx):
-    """build the FAISS index.
-    - looks for all datasets in faiss/metadata/
-    - uses the recipe above by default
-    - however you can override this by adding a new recipe in faiss/recipes/{name}.json
-    """
-    datasets = []
-    for fn in glob.iglob(os.path.join(cfg.DIR_FAISS_METADATA, "*")):
-        name = os.path.basename(fn)
-        recipe_fn = os.path.join(cfg.DIR_FAISS_RECIPES, name + ".json")
-        if os.path.exists(recipe_fn):
-            build_faiss(name, load_recipe(recipe_fn))
-        else:
-            build_faiss(name, DefaultRecipe())
-    # index identities
-    # certain CSV files should be loaded into mysql
-    # User.__table__.drop()
-    SQLemployees.create(engine)
-
-def build_faiss(name, recipe):
-    vec_fn = os.path.join(cfg.DIR_FAISS_METADATA, name, "vecs.csv")
-    index_fn = os.path.join(cfg.DIR_FAISS_INDEXES, name + ".index")
-
-    index = faiss.index_factory(recipe.dim, recipe.factory_type)
-
-    keys, rows = load_csv_safe(vec_fn)
-    feats = np.array([ list(map(float, row[3].split(","))) for row in rows ]).astype('float32')
-    n, d = feats.shape
-
-    print("{}: training {} x {} dim vectors".format(name, n, d))
-    print(recipe.factory_type)
-
-    add_start = time.time()
-    index.add(feats)
-    add_end = time.time()
-    add_time = add_end - add_start
-    print("{}: add time: {:.1f}s".format(name, add_time))
-
-    faiss.write_index(index, index_fn)
diff --git a/megapixels/commands/faiss/build_faiss.py b/megapixels/commands/faiss/build_faiss.py
new file mode 100644
index 00000000..96d3f99e
--- /dev/null
+++ b/megapixels/commands/faiss/build_faiss.py
@@ -0,0 +1,58 @@
+"""
+Index all of the FAISS datasets
+"""
+
+import os
+import glob
+import click
+import faiss
+import time
+import numpy as np
+
+from app.utils.file_utils import load_recipe, load_csv_safe
+from app.settings import app_cfg as cfg
+
+engine = create_engine('sqlite:///:memory:')
+
+class DefaultRecipe:
+    def __init__(self):
+        self.dim = 128
+        self.factory_type = 'Flat'
+
+@click.command()
+@click.pass_context
+def cli(ctx):
+    """build the FAISS index.
+    - looks for all datasets in faiss/metadata/
+    - uses the recipe above by default
+    - however you can override this by adding a new recipe in faiss/recipes/{name}.json
+    """
+    datasets = []
+    for fn in glob.iglob(os.path.join(cfg.DIR_FAISS_METADATA, "*")):
+        name = os.path.basename(fn)
+        recipe_fn = os.path.join(cfg.DIR_FAISS_RECIPES, name + ".json")
+        if os.path.exists(recipe_fn):
+            build_faiss(name, load_recipe(recipe_fn))
+        else:
+            build_faiss(name, DefaultRecipe())
+
+def build_faiss(name, recipe):
+    vec_fn = os.path.join(cfg.DIR_FAISS_METADATA, name, "vecs.csv")
+    index_fn = os.path.join(cfg.DIR_FAISS_INDEXES, name + ".index")
+
+    index = faiss.index_factory(recipe.dim, recipe.factory_type)
+
+    keys, rows = load_csv_safe(vec_fn)
+    feats = np.array([ list(map(float, row[3].split(","))) for row in rows ]).astype('float32')
+    n, d = feats.shape
+
+    print("{}: training {} x {} dim vectors".format(name, n, d))
+    print(recipe.factory_type)
+
+    add_start = time.time()
+    index.add(feats)
+    add_end = time.time()
+    add_time = add_end - add_start
+    print("{}: add time: {:.1f}s".format(name, add_time))
+
+    faiss.write_index(index, index_fn)
diff --git a/megapixels/commands/faiss/sync.py b/megapixels/commands/faiss/sync.py
deleted file mode 100644
index b01211b4..00000000
--- a/megapixels/commands/faiss/sync.py
+++ /dev/null
@@ -1,18 +0,0 @@
-"""
-Sync the FAISS metadata
-"""
-
-import subprocess
-import click
-
-from app.settings import app_cfg as cfg
-
-@click.command()
-@click.pass_context
-def cli(ctx):
-    """synchronize metadata files from s3"""
-    sts = subprocess.call([
-        "s3cmd", "sync",
-        "s3://megapixels/v1/metadata/",
-        cfg.DIR_FAISS_METADATA + '/',
-    ])
diff --git a/megapixels/commands/faiss/sync_metadata.py b/megapixels/commands/faiss/sync_metadata.py
new file mode 100644
index 00000000..b01211b4
--- /dev/null
+++ b/megapixels/commands/faiss/sync_metadata.py
@@ -0,0 +1,18 @@
+"""
+Sync the FAISS metadata
+"""
+
+import subprocess
+import click
+
+from app.settings import app_cfg as cfg
+
+@click.command()
+@click.pass_context
+def cli(ctx):
+    """synchronize metadata files from s3"""
+    sts = subprocess.call([
+        "s3cmd", "sync",
+        "s3://megapixels/v1/metadata/",
+        cfg.DIR_FAISS_METADATA + '/',
+    ])
--
cgit v1.2.3-70-g09d2
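
The commit subject ("mysql import functions") and the placeholder lines dropped from build.py ("# certain CSV files should be loaded into mysql", "SQLemployees.create(engine)") point at a metadata-import step that is not implemented in this patch; note also that both versions of the module call create_engine() without importing it (it would normally come from SQLAlchemy). The sketch below is one possible shape for that step plus a follow-up index lookup. It is not code from this repository: it assumes SQLAlchemy, pandas, and a MySQL driver such as PyMySQL are available, and the connection URL, table names, directory constants, and vecs.csv layout are illustrative stand-ins only.

# Hypothetical sketch -- not part of this commit. Shows how each dataset's
# vecs.csv could be imported into a SQL database and how the index written
# by build_faiss() could be queried afterwards.
import os

import faiss
import numpy as np
import pandas as pd
from sqlalchemy import create_engine  # the import build_faiss.py is missing

# Stand-ins for the app_cfg paths used in the patch.
DIR_FAISS_METADATA = "faiss/metadata"
DIR_FAISS_INDEXES = "faiss/indexes"


def import_metadata(name, engine):
    """Load one dataset's vecs.csv into a SQL table, replacing any old rows."""
    vec_fn = os.path.join(DIR_FAISS_METADATA, name, "vecs.csv")
    df = pd.read_csv(vec_fn)
    df.to_sql(name, con=engine, if_exists="replace", index=False)
    return len(df)


def search_index(name, query_vec, k=5):
    """Return distances and row ids of the k nearest vectors in name.index."""
    index = faiss.read_index(os.path.join(DIR_FAISS_INDEXES, name + ".index"))
    query = np.asarray(query_vec, dtype="float32").reshape(1, -1)
    dists, ids = index.search(query, k)
    return dists[0], ids[0]


if __name__ == "__main__":
    # A real deployment would use a MySQL URL, e.g. (hypothetical credentials)
    # "mysql+pymysql://user:password@localhost/megapixels"; in-memory SQLite
    # keeps the sketch self-contained.
    engine = create_engine("sqlite:///:memory:")
    n_rows = import_metadata("example_dataset", engine)
    print("imported {} rows".format(n_rows))
    # Assumes build_faiss() has already written example_dataset.index.
    print(search_index("example_dataset", np.zeros(128)))

Rebuilding the table with if_exists="replace" on every run would mirror how build_faiss() rewrites each .index file from scratch, so repeating the import stays idempotent.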