author     Jules Laplace <julescarbon@gmail.com>   2019-01-12 17:58:51 +0100
committer  Jules Laplace <julescarbon@gmail.com>   2019-01-12 17:58:51 +0100
commit     c3eec5ef62c6aacf4ca8c8056e1f9150dcd31506 (patch)
tree       9047b96e75b53b731bbb831a8d8d9b5608dac46a /megapixels
parent     59f692719bb6b4163594243e4c11262dc88466b4 (diff)
returning results again
Diffstat (limited to 'megapixels')
-rw-r--r--  megapixels/app/models/sql_factory.py  15
-rw-r--r--  megapixels/app/server/api.py            6
2 files changed, 12 insertions(+), 9 deletions(-)
diff --git a/megapixels/app/models/sql_factory.py b/megapixels/app/models/sql_factory.py
index 5cdaa889..eb91fb37 100644
--- a/megapixels/app/models/sql_factory.py
+++ b/megapixels/app/models/sql_factory.py
@@ -62,9 +62,10 @@ def load_sql_dataset(path, replace=False, engine=None, base_model=None):
df = pd.read_csv(fn)
# fix columns that are named "index", a sql reserved word
df.reindex_axis(sorted(df.columns), axis=1)
+ print(df.columns)
columns = [column.name for column in table.__table__.columns]
- # print(columns)
- df.columns = sorted(columns)
+ print(columns)
+ df.columns = columns
df.to_sql(name=table.__tablename__, con=engine, if_exists='replace', index=False)
return dataset
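Note: the hunk above stops sorting the model columns before assigning them to the DataFrame, so the CSV column order is now assumed to match the model's declared order. A minimal sketch of that alignment step (not part of the commit; load_table, csv_path and engine are assumed stand-in names for what load_sql_dataset actually passes around):

    # Sketch only: align a CSV-loaded DataFrame with a SQLAlchemy model's
    # declared column order before bulk-loading it with to_sql.
    import pandas as pd

    def load_table(csv_path, table, engine):
        df = pd.read_csv(csv_path)
        columns = [column.name for column in table.__table__.columns]
        if set(df.columns) == set(columns):
            # Same names, possibly different order: reorder by name.
            df = df[columns]
        else:
            # Different names (e.g. a CSV header called "index"): rename
            # positionally, which assumes the CSV order matches the model order.
            df.columns = columns
        df.to_sql(name=table.__tablename__, con=engine,
                  if_exists='replace', index=False)

Selecting by name (df[columns]) avoids silently mislabelling data if the CSV column order ever changes; the positional rename branch mirrors what the diff does when the headers differ from the model names.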
@@ -121,7 +122,7 @@ class SqlDataset:
identities = []
file_record_table = self.get_table('file_record')
for row in identity_list:
- file_record = file_record_table.query.filter(file_record_table.id == row.record_id).first()
+ file_record = file_record_table.query.filter(file_record_table.identity_id == row.id).first()
identities.append({
'file_record': file_record.toJSON(),
'identity': row.toJSON(),
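The new filter reverses the lookup direction: the file record is found through its identity_id rather than the identity being resolved through a record_id. As a sketch, with file_record_for_identity as an assumed helper name wrapping the call shown above:

    # Reading of the new filter: FileRecord.identity_id references Identity.id,
    # so the file record belonging to an identity row is found by filtering on
    # that column instead of following a record id stored on the identity.
    def file_record_for_identity(file_record_table, identity_row):
        return (file_record_table.query
                .filter(file_record_table.identity_id == identity_row.id)
                .first())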
@@ -172,12 +173,12 @@ class SqlDataset:
sha256 = Column(String(36, convert_unicode=True), nullable=False)
subdir = Column(String(36, convert_unicode=True), nullable=False)
uuid = Column(String(36, convert_unicode=True), nullable=False)
- identity_index = Column(Integer)
+ identity_id = Column(Integer)
def toJSON(self):
return {
'id': self.id,
'uuid': self.uuid,
- 'identity_index': self.identity_index,
+ 'identity_id': self.identity_id,
}
return FileRecord
@@ -211,7 +212,7 @@ class SqlDataset:
id = Column(Integer, primary_key=True)
h = Column(Float, nullable=False)
image_height = Column(Integer, nullable=False)
- record_index = Column(Integer, nullable=False)
+ record_id = Column(Integer, nullable=False)
image_width = Column(Integer, nullable=False)
w = Column(Float, nullable=False)
x = Column(Float, nullable=False)
@@ -219,7 +220,7 @@ class SqlDataset:
def toJSON(self):
return {
'id': self.id,
- 'record_index': self.record_index,
+ 'record_id': self.record_id,
'image_height': self.image_height,
'image_width': self.image_width,
'w': self.w,
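With identity_index and record_index renamed to identity_id and record_id, the bounding-box rows above now reference FileRecord.id directly. A hypothetical helper (Face, FileRecord and Identity are assumed class names; the real tables are generated inside SqlDataset) that walks one of these rows back to its identity might read:

    # Hypothetical helper: follow record_id -> FileRecord.id and then
    # identity_id -> Identity.id to resolve a bounding-box row to an identity.
    def identity_for_face(session, Face, FileRecord, Identity, face_id):
        face = session.query(Face).get(face_id)
        record = session.query(FileRecord).get(face.record_id)
        return session.query(Identity).get(record.identity_id)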
diff --git a/megapixels/app/server/api.py b/megapixels/app/server/api.py
index 5219a8da..5f33e84b 100644
--- a/megapixels/app/server/api.py
+++ b/megapixels/app/server/api.py
@@ -111,7 +111,8 @@ def upload(dataset_name):
dists.append(round(float(_d), 2))
ids.append(_i+1)
- results = [ dataset.get_identity(int(_i)) for _i in ids ]
+ file_records = [ dataset.get_file_record(int(_i)) for _i in ids ]
+ identities = [ dataset.get_identity(rec.identity_id) for rec in file_records ]
# print(distances)
# print(ids)
@@ -128,7 +129,7 @@ def upload(dataset_name):
# print(results)
return jsonify({
'query': query,
- 'results': results,
+ 'results': identities,
'distances': dists,
})
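The reworked response treats the neighbour ids as file-record ids and resolves each match to an identity through the record's identity_id. A rough sketch of that flow, with resolve_matches as an assumed wrapper around the dataset calls used above:

    # Sketch of the two-step match resolution in upload(): ids are
    # file-record ids, so each one is mapped through its record's
    # identity_id before being returned as a result.
    def resolve_matches(dataset, ids, dists, query):
        file_records = [dataset.get_file_record(int(i)) for i in ids]
        identities = [dataset.get_identity(rec.identity_id) for rec in file_records]
        return {
            'query': query,
            'results': identities,
            'distances': dists,
        }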
@@ -148,6 +149,7 @@ def name_lookup(dataset_name):
'q': q,
'timing': time.time() - start,
}
+
if len(terms) == 0:
results = []
elif len(terms) == 1: