Diffstat (limited to 'megapixels/app/site/s3.py')
-rw-r--r--  megapixels/app/site/s3.py  67
1 file changed, 67 insertions, 0 deletions
diff --git a/megapixels/app/site/s3.py b/megapixels/app/site/s3.py
new file mode 100644
index 00000000..99726a4d
--- /dev/null
+++ b/megapixels/app/site/s3.py
@@ -0,0 +1,67 @@
+import os
+import glob
+import boto3
+
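+# Sync a page's assets/ directory with S3: upload new or updated files, delete remote files that no longer exist locally.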
+def sync_directory(base_fn, s3_path, metadata):
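+    # Index local asset basenames; anything still in fns after the remote walk below is new and gets uploaded.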
+    fns = {}
+    for fn in glob.glob(os.path.join(base_fn, 'assets/*')):
+        fns[os.path.basename(fn)] = True
+
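+    # Skip pages that are not flagged for syncing in their metadata.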
+    if not metadata['sync']:
+        return
+
+    remote_path = s3_path + metadata['url']
+
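+    # Credentials, endpoint, and region are read from the environment.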
+    session = boto3.session.Session()
+
+    s3_client = session.client(
+        service_name='s3',
+        aws_access_key_id=os.getenv('S3_KEY'),
+        aws_secret_access_key=os.getenv('S3_SECRET'),
+        endpoint_url=os.getenv('S3_ENDPOINT'),
+        region_name=os.getenv('S3_REGION'),
+    )
+
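+    # List what already exists under this page's prefix (list_objects returns at most 1000 keys per call).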
+    directory = s3_client.list_objects(Bucket=os.getenv('S3_BUCKET'), Prefix=remote_path)
+
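+    # Re-upload files whose local copy is newer than the remote object; delete remote objects with no local counterpart.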
+    if 'Contents' in directory:
+        for obj in directory['Contents']:
+            s3_fn = obj['Key']
+            fn = os.path.basename(s3_fn)
+            local_fn = os.path.join(base_fn, 'assets', fn)
+            if fn in fns:
+                del fns[fn]
+                if obj['LastModified'].timestamp() < os.path.getmtime(local_fn):
+                    print("s3 update {}".format(s3_fn))
+                    s3_client.upload_file(
+                        local_fn,
+                        os.getenv('S3_BUCKET'),
+                        s3_fn,
+                        ExtraArgs={'ACL': 'public-read'})
+            else:
+                print("s3 delete {}".format(s3_fn))
+                s3_client.delete_object(
+                    Bucket=os.getenv('S3_BUCKET'),
+                    Key=s3_fn,
+                )
+
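+    # Anything left in fns exists locally but not remotely yet: upload it as a new object.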
+    for fn in fns:
+        local_fn = os.path.join(base_fn, 'assets', fn)
+        s3_fn = os.path.join(remote_path, 'assets', fn)
+        print("s3 create {}".format(s3_fn))
+        s3_client.upload_file(
+            local_fn,
+            os.getenv('S3_BUCKET'),
+            s3_fn,
+            ExtraArgs={'ACL': 'public-read'})
+
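+# Build the public base URL for a page's assets from the configured endpoint and bucket.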
+def make_s3_path(s3_dir, metadata_path):
+    return "{}/{}/{}{}".format(os.getenv('S3_ENDPOINT'), os.getenv('S3_BUCKET'), s3_dir, metadata_path)