Diffstat (limited to 'megapixels/commands/datasets')
 -rw-r--r--  megapixels/commands/datasets/file_meta.py  84
 -rw-r--r--  megapixels/commands/datasets/sha256.py     55
2 files changed, 111 insertions(+), 28 deletions(-)
diff --git a/megapixels/commands/datasets/file_meta.py b/megapixels/commands/datasets/file_meta.py
new file mode 100644
index 00000000..e1456f44
--- /dev/null
+++ b/megapixels/commands/datasets/file_meta.py
@@ -0,0 +1,84 @@
+"""
+Begin with this file to process folder of images
+- Converts folders and subdirectories into CSV with file attributes split
+"""
+import click
+
+from app.settings import types
+from app.utils import click_utils
+from app.settings import app_cfg as cfg
+from app.utils.logger_utils import Logger
+
+log = Logger.getLogger()
+
+@click.command()
+@click.option('-i', '--input', 'opt_fp_in', required=True,
+  help='Input directory')
+@click.option('-o', '--output', 'opt_fp_out', required=True,
+  help='Output file for file meta CSV')
+@click.option('--slice', 'opt_slice', type=(int, int), default=(None, None),
+  help='Slice list of files')
+@click.option('--recursive/--no-recursive', 'opt_recursive', is_flag=True, default=False,
+  help='Use glob recursion (slower)')
+@click.option('-t', '--threads', 'opt_threads', default=4,
+  help='Number of threads')
+@click.option('-f', '--force', 'opt_force', is_flag=True,
+  help='Force overwrite file')
+@click.pass_context
+def cli(ctx, opt_fp_in, opt_fp_out, opt_slice, opt_recursive, opt_threads, opt_force):
+  """Multithreading test"""
+
+  from glob import glob
+  from os.path import join
+  from pathlib import Path
+  import time
+  from multiprocessing.dummy import Pool as ThreadPool
+  import random
+
+  import pandas as pd
+  from tqdm import tqdm
+  from glob import glob
+
+  from app.utils import file_utils, im_utils
+
+
+  if not opt_force and Path(opt_fp_out).exists():
+    log.error('File exists. Use "-f / --force" to overwite')
+    return
+
+  fp_ims = []
+  log.info(f'Globbing {opt_fp_in}')
+  for ext in ['jpg', 'png']:
+    if opt_recursive:
+      fp_glob = join(opt_fp_in, '**/*.{}'.format(ext))
+      fp_ims += glob(fp_glob, recursive=True)
+    else:
+      fp_glob = join(opt_fp_in, '*.{}'.format(ext))
+      fp_ims += glob(fp_glob)
+
+  if not fp_ims:
+    log.warn('No images. Try with "--recursive"')
+    return
+
+  if opt_slice:
+    fp_ims = fp_ims[opt_slice[0]:opt_slice[1]]
+
+  log.info('Processing {:,} images'.format(len(fp_ims)))
+
+
+  # convert data to dict
+  data = []
+  for i, fp_im in enumerate(tqdm(fp_ims)):
+    fpp_im = Path(fp_im)
+    subdir = str(fpp_im.parent.relative_to(opt_fp_in))
+    data.append( {
+      'subdir': subdir,
+      'fn': fpp_im.stem,
+      'ext': fpp_im.suffix.replace('.','')
+      })
+
+  # save to CSV
+  file_utils.mkdirs(opt_fp_out)
+  df = pd.DataFrame.from_dict(data)
+  df.index.name = 'index'
+  df.to_csv(opt_fp_out)
\ No newline at end of file
diff --git a/megapixels/commands/datasets/sha256.py b/megapixels/commands/datasets/sha256.py
index c04fb504..4c734073 100644
--- a/megapixels/commands/datasets/sha256.py
+++ b/megapixels/commands/datasets/sha256.py
@@ -10,18 +10,18 @@ log = Logger.getLogger()
 @click.command()
 @click.option('-i', '--input', 'opt_fp_in', required=True,
   help='Input directory')
-@click.option('-o', '--output', 'opt_fp_out',
+@click.option('-m', '--media', 'opt_dir_media', required=True,
+  help='Input media directory')
+@click.option('-o', '--output', 'opt_fp_out', required=True,
   help='Output directory')
 @click.option('--slice', 'opt_slice', type=(int, int), default=(None, None),
   help='Slice list of files')
-@click.option('--recursive/--no-recursive', 'opt_recursive', is_flag=True, default=False,
-  help='Use glob recursion (slower)')
 @click.option('-t', '--threads', 'opt_threads', default=4,
   help='Number of threads')
 @click.option('-f', '--force', 'opt_force', is_flag=True,
   help='Force overwrite file')
 @click.pass_context
-def cli(ctx, opt_fp_in, opt_fp_out, opt_slice, opt_recursive, opt_threads, opt_force):
+def cli(ctx, opt_fp_in, opt_dir_media, opt_fp_out, opt_slice, opt_threads, opt_force):
   """Multithreading test"""
 
   from glob import glob
@@ -42,47 +42,46 @@ def cli(ctx, opt_fp_in, opt_fp_out, opt_slice, opt_recursive, opt_threads, opt_f
     log.error('File exists. Use "-f / --force" to overwite')
     return
 
-  fp_ims = []
-  for ext in ['jpg', 'png']:
-    if opt_recursive:
-      fp_glob = join(opt_fp_in, '**/*.{}'.format(ext))
-      fp_ims += glob(fp_glob, recursive=True)
-    else:
-      fp_glob = join(opt_fp_in, '*.{}'.format(ext))
-      fp_ims += glob(fp_glob)
+  df_files = pd.read_csv(opt_fp_in).set_index('index')
 
   if opt_slice:
-    fp_ims = fp_ims[opt_slice[0]:opt_slice[1]]
+    df_files = df_files[opt_slice[0]:opt_slice[1]]
 
-  log.info('Processing {:,} images'.format(len(fp_ims)))
+  log.info('Processing {:,} images'.format(len(df_files)))
 
 
-  pbar = tqdm(total=100)
+
+  # prepare list of images to multithread into sha256s
+  file_objs = []
+  for ds_file in df_files.itertuples():
+    fp_im = join(opt_dir_media, str(ds_file.subdir), f"{ds_file.fn}.{ds_file.ext}")
+    file_objs.append({'fp': fp_im, 'index': ds_file.Index})
+
+  # convert to thread pool
+  pbar = tqdm(total=len(file_objs))
 
-  def as_sha256(fp_im):
+  def as_sha256(file_obj):
     pbar.update(1)
-    return file_utils.sha256(fp_im)
+    file_obj['sha256'] = file_utils.sha256(file_obj['fp'])
+    return file_obj
 
   # multithread pool
+  pool_file_objs = []
   st = time.time()
   pool = ThreadPool(opt_threads)
-  with tqdm(total=len(fp_ims)) as pbar:
-    sha256s = pool.map(as_sha256, fp_ims)
+  with tqdm(total=len(file_objs)) as pbar:
+    pool_file_objs = pool.map(as_sha256, file_objs)
   pbar.close()
-  
+
   # convert data to dict
   data = []
-  for i, fp_im in enumerate(fp_ims):
-    fpp_im = Path(fp_im)
-    subdir = str(fpp_im.parent.relative_to(opt_fp_in))
-    sha256 = sha256s[i]
+  for pool_file_obj in pool_file_objs:
     data.append( {
-      'sha256': sha256,
-      'subdir': subdir,
-      'fn': fpp_im.stem,
-      'ext': fpp_im.suffix.replace('.','')
+      'sha256': pool_file_obj['sha256'],
+      'index': pool_file_obj['index']
      })
 
   # save to CSV
+  file_utils.mkdirs(opt_fp_out)
   df = pd.DataFrame.from_dict(data)
   df.to_csv(opt_fp_out, index=False)
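
Read together, the two commands form a small two-step pipeline: file_meta.py walks an image directory and writes a CSV keyed by 'index' with 'subdir', 'fn', and 'ext' columns, and the reworked sha256.py reads that CSV, rebuilds each path under the media root passed via -m, hashes the files in a thread pool, and writes an 'index'/'sha256' CSV. Below is a minimal sketch (not part of this commit) of how the two outputs line up on the shared 'index' key; the file names file_meta.csv and sha256.csv are hypothetical.

# Sketch only: join the sha256 output back onto the file-meta CSV.
# Assumes both CSVs were produced by the commands in this diff.
import pandas as pd

df_meta = pd.read_csv('file_meta.csv').set_index('index')    # subdir, fn, ext
df_sha256 = pd.read_csv('sha256.csv').set_index('index')     # sha256

# one row per image: subdir, fn, ext, sha256
df = df_meta.join(df_sha256)
print(df.head())

The join works because file_meta.py names its index column 'index' (df.index.name = 'index') and sha256.py carries that same value through as ds_file.Index into its output rows.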
