1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
|
import glob
import os
import posixpath

import boto3
def sync_directory(base_fn, s3_path, metadata):
    """
    Synchronize a local ``assets/`` folder with S3.

    Uploads local files that are new or newer than their remote copy, and
    deletes remote objects that no longer exist locally.

    Parameters
    ----------
    base_fn : str
        Local base directory expected to contain an ``assets/`` subfolder.
    s3_path : str
        Key prefix under which this content lives in the bucket.
    metadata : dict
        Must contain ``'sync'`` (falsy value disables syncing entirely) and
        ``'url'`` (str appended to ``s3_path`` to form the remote prefix).

    Reads the S3_KEY, S3_SECRET, S3_ENDPOINT, S3_REGION and S3_BUCKET
    environment variables for credentials and bucket selection.
    """
    if not metadata['sync']:
        return
    # Names of all local asset files. Entries are discarded as their remote
    # counterpart is found; whatever remains afterwards must be created.
    local_names = {os.path.basename(fn)
                   for fn in glob.glob(os.path.join(base_fn, 'assets/*'))}
    remote_path = s3_path + metadata['url']
    session = boto3.session.Session()
    s3_client = session.client(
        service_name='s3',
        aws_access_key_id=os.getenv('S3_KEY'),
        aws_secret_access_key=os.getenv('S3_SECRET'),
        endpoint_url=os.getenv('S3_ENDPOINT'),
        region_name=os.getenv('S3_REGION'),
    )
    bucket = os.getenv('S3_BUCKET')
    # Paginate: the legacy list_objects call returns at most 1000 keys and
    # would silently miss anything beyond that.
    paginator = s3_client.get_paginator('list_objects_v2')
    for page in paginator.paginate(Bucket=bucket, Prefix=remote_path):
        for obj in page.get('Contents', []):
            s3_fn = obj['Key']
            fn = os.path.basename(s3_fn)
            local_fn = os.path.join(base_fn, 'assets', fn)
            if fn in local_names:
                local_names.discard(fn)
                # Re-upload only when the local file is strictly newer than
                # the stored object (both compared as POSIX timestamps).
                if obj['LastModified'].timestamp() < os.path.getmtime(local_fn):
                    print("s3 update {}".format(s3_fn))
                    s3_client.upload_file(
                        local_fn,
                        bucket,
                        s3_fn,
                        ExtraArgs={'ACL': 'public-read'})
            else:
                # No local counterpart: drop the stale remote object.
                print("s3 delete {}".format(s3_fn))
                s3_client.delete_object(
                    Bucket=bucket,
                    Key=s3_fn,
                )
    # Anything still listed locally was never seen remotely: create it.
    # posixpath.join keeps '/' separators in S3 keys even on Windows,
    # where os.path.join would insert backslashes.
    for fn in local_names:
        local_fn = os.path.join(base_fn, 'assets', fn)
        s3_fn = posixpath.join(remote_path, 'assets', fn)
        print("s3 create {}".format(s3_fn))
        s3_client.upload_file(
            local_fn,
            bucket,
            s3_fn,
            ExtraArgs={'ACL': 'public-read'})
def make_s3_path(s3_dir, metadata_path):
    """Build the public S3 URL for *metadata_path* under *s3_dir*.

    Combines the S3_ENDPOINT and S3_BUCKET environment variables with the
    arguments as ``ENDPOINT/BUCKET/S3_DIR + METADATA_PATH`` (no separator
    is inserted between the last two components).
    """
    endpoint = os.getenv('S3_ENDPOINT')
    bucket = os.getenv('S3_BUCKET')
    return f"{endpoint}/{bucket}/{s3_dir}{metadata_path}"
|