|  |  |  |
|---|---|---|
| author | adamhrv <adam@ahprojects.com> | 2018-12-14 02:06:39 +0100 |
| committer | adamhrv <adam@ahprojects.com> | 2018-12-14 02:06:39 +0100 |
| commit | 5891e2f13ae9dfead0e1794c399e5ff813e694d3 | |
| tree | 05bbac1063e120f1066d8f306ac2521a1aaf70ee /megapixels/commands/datasets/sha256.py | |
| parent | 523793a79ce6ed2d2e1d48cb4765e702ee388a6e | |
added FR demo notebook
Diffstat (limited to 'megapixels/commands/datasets/sha256.py')
|  |  |  |
|---|---|---|
| -rw-r--r-- | megapixels/commands/datasets/sha256.py | 55 |

1 file changed, 27 insertions, 28 deletions
```diff
diff --git a/megapixels/commands/datasets/sha256.py b/megapixels/commands/datasets/sha256.py
index c04fb504..4c734073 100644
--- a/megapixels/commands/datasets/sha256.py
+++ b/megapixels/commands/datasets/sha256.py
@@ -10,18 +10,18 @@ log = Logger.getLogger()
 
 @click.command()
 @click.option('-i', '--input', 'opt_fp_in', required=True,
   help='Input directory')
-@click.option('-o', '--output', 'opt_fp_out',
+@click.option('-m', '--media', 'opt_dir_media', required=True,
+  help='Input media directory')
+@click.option('-o', '--output', 'opt_fp_out', required=True,
   help='Output directory')
 @click.option('--slice', 'opt_slice', type=(int, int), default=(None, None),
   help='Slice list of files')
-@click.option('--recursive/--no-recursive', 'opt_recursive', is_flag=True, default=False,
-  help='Use glob recursion (slower)')
 @click.option('-t', '--threads', 'opt_threads', default=4,
   help='Number of threads')
 @click.option('-f', '--force', 'opt_force', is_flag=True,
   help='Force overwrite file')
 @click.pass_context
-def cli(ctx, opt_fp_in, opt_fp_out, opt_slice, opt_recursive, opt_threads, opt_force):
+def cli(ctx, opt_fp_in, opt_dir_media, opt_fp_out, opt_slice, opt_threads, opt_force):
   """Multithreading test"""
 
   from glob import glob
@@ -42,47 +42,46 @@ def cli(ctx, opt_fp_in, opt_fp_out, opt_slice, opt_recursive, opt_threads, opt_f
     log.error('File exists. Use "-f / --force" to overwite')
     return
 
-  fp_ims = []
-  for ext in ['jpg', 'png']:
-    if opt_recursive:
-      fp_glob = join(opt_fp_in, '**/*.{}'.format(ext))
-      fp_ims += glob(fp_glob, recursive=True)
-    else:
-      fp_glob = join(opt_fp_in, '*.{}'.format(ext))
-      fp_ims += glob(fp_glob)
+  df_files = pd.read_csv(opt_fp_in).set_index('index')
 
   if opt_slice:
-    fp_ims = fp_ims[opt_slice[0]:opt_slice[1]]
+    df_files = df_files[opt_slice[0]:opt_slice[1]]
 
-  log.info('Processing {:,} images'.format(len(fp_ims)))
+  log.info('Processing {:,} images'.format(len(df_files)))
 
-  pbar = tqdm(total=100)
+
+  # prepare list of images to multithread into sha256s
+  file_objs = []
+  for ds_file in df_files.itertuples():
+    fp_im = join(opt_dir_media, str(ds_file.subdir), f"{ds_file.fn}.{ds_file.ext}")
+    file_objs.append({'fp': fp_im, 'index': ds_file.Index})
+
+  # convert to thread pool
+  pbar = tqdm(total=len(file_objs))
 
-  def as_sha256(fp_im):
+  def as_sha256(file_obj):
     pbar.update(1)
-    return file_utils.sha256(fp_im)
+    file_obj['sha256'] = file_utils.sha256(file_obj['fp'])
+    return file_obj
 
   # multithread pool
+  pool_file_objs = []
   st = time.time()
   pool = ThreadPool(opt_threads)
-  with tqdm(total=len(fp_ims)) as pbar:
-    sha256s = pool.map(as_sha256, fp_ims)
+  with tqdm(total=len(file_objs)) as pbar:
+    pool_file_objs = pool.map(as_sha256, file_objs)
   pbar.close()
-
+  # convert data to dict
   data = []
-  for i, fp_im in enumerate(fp_ims):
-    fpp_im = Path(fp_im)
-    subdir = str(fpp_im.parent.relative_to(opt_fp_in))
-    sha256 = sha256s[i]
+  for pool_file_obj in pool_file_objs:
     data.append(
       {
-        'sha256': sha256,
-        'subdir': subdir,
-        'fn': fpp_im.stem,
-        'ext': fpp_im.suffix.replace('.','')
+        'sha256': pool_file_obj['sha256'],
+        'index': pool_file_obj['index']
       })
 
   # save to CSV
+  file_utils.mkdirs(opt_fp_out)
   df = pd.DataFrame.from_dict(data)
   df.to_csv(opt_fp_out, index=False)
```
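The gist of the change: instead of globbing jpg/png files from an input directory (with an optional recursive flag), the command now reads a file-index CSV via `-i/--input`, resolves each record against a media root given by the new `-m/--media` option, hashes the files on a thread pool, and writes an `index,sha256` CSV that can be joined back to the file index. Below is a minimal, self-contained sketch of that pattern, assuming the index CSV has `index`, `subdir`, `fn`, and `ext` columns as the diff suggests; the `sha256_file` helper and `hash_file_index` function are illustrative stand-ins, not the project's actual `file_utils` API.

```python
import hashlib
from multiprocessing.pool import ThreadPool
from os.path import join

import pandas as pd
from tqdm import tqdm


def sha256_file(fp, blocksize=65536):
    """Hash a file in chunks so large media files are not read into memory at once."""
    h = hashlib.sha256()
    with open(fp, 'rb') as f:
        for block in iter(lambda: f.read(blocksize), b''):
            h.update(block)
    return h.hexdigest()


def hash_file_index(fp_csv_in, dir_media, fp_csv_out, threads=4):
    # read the file-index CSV (column names 'index', 'subdir', 'fn', 'ext' are assumed)
    df_files = pd.read_csv(fp_csv_in).set_index('index')

    # build one work item per record: media path plus the original index
    file_objs = [
        {'fp': join(dir_media, str(r.subdir), f'{r.fn}.{r.ext}'), 'index': r.Index}
        for r in df_files.itertuples()
    ]

    pbar = tqdm(total=len(file_objs))

    def as_sha256(file_obj):
        # runs on a worker thread; hashing is I/O-bound, so threads are sufficient
        file_obj['sha256'] = sha256_file(file_obj['fp'])
        pbar.update(1)
        return file_obj

    with ThreadPool(threads) as pool:
        results = pool.map(as_sha256, file_objs)
    pbar.close()

    # keep only index -> sha256 so the output can be merged back onto the file index
    rows = [{'index': r['index'], 'sha256': r['sha256']} for r in results]
    pd.DataFrame(rows).to_csv(fp_csv_out, index=False)
```

Writing out only the index and hash avoids duplicating path metadata: the subdir, filename, and extension already live in the input CSV and can be re-joined on `index` downstream.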
