path: root/test-mogrify.py
authorJules Laplace <julescarbon@gmail.com>2018-05-15 03:55:32 +0200
committerJules Laplace <julescarbon@gmail.com>2018-05-15 03:55:32 +0200
commit8c73f6069dd98e5484f655eda3ff5d07bf65219b (patch)
tree4c9de8d262c68de8ad7d8e8a4f9bff9eb7ce5221 /test-mogrify.py
parent7d23842225e871366a77901471b365984db8ef79 (diff)
test
Diffstat (limited to 'test-mogrify.py')
-rw-r--r--  test-mogrify.py  225
1 file changed, 128 insertions(+), 97 deletions(-)
diff --git a/test-mogrify.py b/test-mogrify.py
index 12e042e..392b520 100644
--- a/test-mogrify.py
+++ b/test-mogrify.py
@@ -20,129 +20,160 @@ import subprocess
from time import sleep
if __name__ == '__main__':
- opt = TestOptions().parse()
- data_opt = DatasetOptions().parse(opt.unknown)
- opt.nThreads = 1 # test code only supports nThreads = 1
- opt.batchSize = 1 # test code only supports batchSize = 1
- opt.serial_batches = True # no shuffle
- opt.no_flip = True # no flip
- opt.experiment = data_opt.experiment # opt.start_img.split("/")[-1].split(".")[0]
+ opt = TestOptions().parse()
+ data_opt = DatasetOptions().parse(opt.unknown)
+ opt.nThreads = 1 # test code only supports nThreads = 1
+ opt.batchSize = 1 # test code only supports batchSize = 1
+ opt.serial_batches = True # no shuffle
+ opt.no_flip = True # no flip
+ opt.experiment = data_opt.experiment # opt.start_img.split("/")[-1].split(".")[0]
- d = datetime.now()
- tag = "{}_{}_{}".format(
- opt.name, data_opt.experiment,
- d.strftime('%Y%m%d%H%M'))
+ d = datetime.now()
+ tag = "{}_{}_{}".format(
+ opt.name, data_opt.experiment,
+ d.strftime('%Y%m%d%H%M'))
- opt.tag = tag # = "pcfade___201805150250"
+ opt.tag = tag # = "pcfade___201805150250"
- opt.render_dir = render_dir = opt.results_dir + opt.name + "/" + tag + "/"
+ opt.render_dir = render_dir = opt.results_dir + opt.name + "/" + tag + "/"
+ A_offset = 0
+ A_im = None
+ A_dir = None
- print("create render_dir: {}".format(render_dir))
- if os.path.exists(render_dir):
- rmtree(render_dir)
- mkdirs(render_dir)
+ print("create render_dir: {}".format(render_dir))
+ if os.path.exists(render_dir):
+ rmtree(render_dir)
+ mkdirs(render_dir)
- # cmd = ("convert", opt.start_img, '-canny', '0x1+10%+30%', render_dir + "frame_00000.png")
- # process = subprocess.Popen(cmd, stdout=subprocess.PIPE)
- # output, error = process.communicate()
+ # cmd = ("convert", opt.start_img, '-canny', '0x1+10%+30%', render_dir + "frame_00000.png")
+ # process = subprocess.Popen(cmd, stdout=subprocess.PIPE)
+ # output, error = process.communicate()
- copyfile(opt.start_img, render_dir + "frame_00000.png")
+ def load_first_frame():
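+ # seed frame_00000: either copy the start image verbatim, or run it through
+ # the same process_image() filter chain used for every later frame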
+ if data_opt.just_copy:
+ copyfile(opt.start_img, render_dir + "frame_00000.png")
+ else:
+ A_im = Image.open(opt.start_img).convert('RGB')
+ A = process_image(np.asarray(A_im))
+ cv2.imwrite(render_dir + "frame_00000.png", A)
- i_offset = 0
numz = re.findall(r'\d+', os.path.basename(opt.start_img))
if len(numz) > 0:
- i_offset = int(numz[0])
+ A_offset = int(numz[0])
+ if A_offset:
+ print(">> starting offset: {}".format(A_offset))
+ A_dir = opt.start_img.replace(numz[0], "{:05d}")
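+ # turn the start image path into a zero-padded template so later frames of
+ # the source sequence can be looked up by index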
- data_loader = CreateRecursiveDataLoader(opt)
- dataset = data_loader.load_data()
- ds = dataset.dataset
- model = create_model(opt)
- visualizer = Visualizer(opt)
- # create website
- # web_dir = os.path.join(opt.results_dir, opt.name, '%s_%s' % (opt.phase, opt.which_epoch))
- # webpage = html.HTML(web_dir, 'Experiment = %s, Phase = %s, Epoch = %s' % (opt.name, opt.phase, opt.which_epoch))
- # test
- last_im = None
- for i, data in enumerate(data_loader):
- if i >= opt.how_many:
- break
- model.set_input(data)
- model.test()
- visuals = model.get_current_visuals()
- img_path = model.get_image_paths()
+ def process_image(im):
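+ # the network/PIL side of the pipeline is RGB; flip to BGR for the OpenCV calls below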
+ img = im[:, :, ::-1].copy()
- if (i % 20) == 0:
- print('%04d: process image... %s' % (i, img_path))
- # ims = visualizer.save_images(webpage, visuals, img_path, aspect_ratio=opt.aspect_ratio)
+ if data_opt.clahe is True:
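+ # CLAHE: equalize only the lightness channel in LAB space, then convert back to BGR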
+ lab = cv2.cvtColor(img, cv2.COLOR_BGR2LAB)
+ l, a, b = cv2.split(lab)
+ clahe = cv2.createCLAHE(clipLimit=data_opt.clip_limit, tileGridSize=(8,8))
+ l = clahe.apply(l)
+ limg = cv2.merge((l,a,b))
+ img = cv2.cvtColor(limg, cv2.COLOR_LAB2BGR)
- im = visuals['fake_B']
- tmp_path = render_dir + "frame_{:05d}_tmp.png".format(i+1)
- edges_path = render_dir + "frame_{:05d}.png".format(i+1)
- render_path = render_dir + "ren_{:05d}.png".format(i+1)
+ if data_opt.posterize is True:
+ img = cv2.pyrMeanShiftFiltering(img, data_opt.spatial_window, data_opt.color_window)
+ if data_opt.grayscale is True:
+ img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
+ if data_opt.blur is True:
+ img = cv2.GaussianBlur(img, (data_opt.blur_radius, data_opt.blur_radius), data_opt.blur_sigma)
+ if data_opt.canny is True:
+ img = cv2.Canny(img, data_opt.canny_lo, data_opt.canny_hi)
+ return img
- image_pil = Image.fromarray(im, mode='RGB')
- image_pil.save(tmp_path)
- os.rename(tmp_path, render_path)
+ load_first_frame()
- if dataset.name() == 'RecursiveDatasetDataLoader':
- if data_opt.recursive and last_im is not None:
- tmp_im = im.copy()
+ data_loader = CreateRecursiveDataLoader(opt)
+ dataset = data_loader.load_data()
+ ds = dataset.dataset
+ model = create_model(opt)
+ visualizer = Visualizer(opt)
+ # create website
+ # web_dir = os.path.join(opt.results_dir, opt.name, '%s_%s' % (opt.phase, opt.which_epoch))
+ # webpage = html.HTML(web_dir, 'Experiment = %s, Phase = %s, Epoch = %s' % (opt.name, opt.phase, opt.which_epoch))
+ # test
+ last_im = None
+ for i, data in enumerate(data_loader):
+ if i >= opt.how_many:
+ break
+ model.set_input(data)
+ model.test()
+ visuals = model.get_current_visuals()
+ img_path = model.get_image_paths()
- frac_a = data_opt.recursive_frac
- frac_b = 1.0 - frac_a
+ if (i % 20) == 0:
+ print('%04d: process image... %s' % (i, img_path))
+ # ims = visualizer.save_images(webpage, visuals, img_path, aspect_ratio=opt.aspect_ratio)
- array_a = np.multiply(im.astype('float64'), frac_a)
- array_b = np.multiply(last_im.astype('float64'), frac_b)
- im = np.add(array_a, array_b).astype('uint8')
- # print(im.shape, im.dtype)
- # last_im = np.roll(tmp_im, 1, axis=1)
- else:
- last_im = im.copy().astype('uint8')
- tmp_im = im.copy().astype('uint8')
- #print(im.shape, im.dtype)
+ im = visuals['fake_B']
+ tmp_path = render_dir + "frame_{:05d}_tmp.png".format(i+1)
+ edges_path = render_dir + "frame_{:05d}.png".format(i+1)
+ render_path = render_dir + "ren_{:05d}.png".format(i+1)
+ sequence_path = A_dir.format(A_offset + i + 1) if A_dir else None
+ image_pil = Image.fromarray(im, mode='RGB')
+ image_pil.save(tmp_path)
+ os.rename(tmp_path, render_path)
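+ # write to a temp name first, then rename, so a partially written frame is never left under the final name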
- image_pil = Image.fromarray(im, mode='RGB')
- im = np.asarray(image_pil).astype('uint8')
- #print(im.shape, im.dtype)
+ if dataset.name() == 'RecursiveDatasetDataLoader':
+ if data_opt.recursive and last_im is not None:
+ tmp_im = im.copy()
- img = im[:, :, ::-1].copy()
+ if data_opt.sequence:
+ A_im = Image.open(sequence_path).convert('RGB')
+ frac_a = data_opt.recursive_frac
+ frac_b = data_opt.sequence_frac
+ frac_c = 1.0 - frac_a - frac_b
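+ # three-way weighted blend: previous output (frac_a) + source sequence frame (frac_b) + current output (frac_c)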
+ array_a = np.multiply(last_im.astype('float64'), frac_a)
+ array_b = np.multiply(np.asarray(A_im).astype('float64'), frac_b)
+ array_c = np.multiply(im.astype('float64'), frac_c)
+ comb_ab = np.add(array_a, array_b)
+ comb_abc = np.add(comb_ab, array_c)
+ im = comb_abc.astype('uint8')
- if data_opt.clahe is True:
- lab = cv2.cvtColor(img, cv2.COLOR_BGR2LAB)
- l, a, b = cv2.split(lab)
- clahe = cv2.createCLAHE(clipLimit=data_opt.clip_limit, tileGridSize=(8,8))
- l = clahe.apply(l)
- limg = cv2.merge((l,a,b))
- img = cv2.cvtColor(limg, cv2.COLOR_LAB2BGR)
+ else:
+ frac_a = data_opt.recursive_frac
+ frac_b = 1.0 - frac_a
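+ # no source sequence: blend only the previous output with the current output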
+ array_a = np.multiply(last_im.astype('float64'), frac_a)
+ array_b = np.multiply(im.astype('float64'), frac_b)
+ im = np.add(array_a, array_b).astype('uint8')
- if data_opt.posterize is True:
- img = cv2.pyrMeanShiftFiltering(img, data_opt.spatial_window, data_opt.color_window)
- if data_opt.grayscale is True:
- img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
- if data_opt.blur is True:
- img = cv2.GaussianBlur(img, (data_opt.blur_radius, data_opt.blur_radius), data_opt.blur_sigma)
- if data_opt.canny is True:
- img = cv2.Canny(img, data_opt.canny_lo, data_opt.canny_hi)
+ if data_opt.recurse_roll != 0:
+ last_im = np.roll(tmp_im, data_opt.recurse_roll, axis=data_opt.recurse_roll_axis)
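+ # shift the fed-back frame along the chosen axis before the next iteration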
- cv2.imwrite(tmp_path, img)
- os.rename(tmp_path, edges_path)
+ else:
+ last_im = im.copy().astype('uint8')
+ tmp_im = im.copy().astype('uint8')
+ #print(im.shape, im.dtype)
- # webpage.save()
+ image_pil = Image.fromarray(im, mode='RGB')
+ im = np.asarray(image_pil).astype('uint8')
+ #print(im.shape, im.dtype)
- # os.remove(render_dir + "frame_00000.png")
+ img = process_image(im)
- print(opt.render_dir)
- video_fn = tag + "_mogrify.mp4"
+ cv2.imwrite(tmp_path, img)
+ os.rename(tmp_path, edges_path)
- cmd = ("ffmpeg", "-i", render_dir + "ren_%05d.png", "-y", "-c:v", "libx264", "-vf", "fps=30", "-pix_fmt", "yuv420p", "-s", "456x256", render_dir + video_fn)
- process = subprocess.Popen(cmd, stdout=subprocess.PIPE)
- output, error = process.communicate()
+ # webpage.save()
- print("________")
+ # os.remove(render_dir + "frame_00000.png")
- cmd = ("scp", render_dir + video_fn, "jules@asdf.us:asdf/neural/")
- process = subprocess.Popen(cmd, stdout=subprocess.PIPE)
- output, error = process.communicate()
+ print(opt.render_dir)
+ video_fn = tag + "_mogrify.mp4"
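+ # assemble the rendered ren_*.png frames into an H.264 mp4 (30 fps, 456x256)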
- print("https://asdf.us/neural/" + video_fn)
+ cmd = ("ffmpeg", "-i", render_dir + "ren_%05d.png", "-y", "-c:v", "libx264", "-vf", "fps=30", "-pix_fmt", "yuv420p", "-s", "456x256", render_dir + video_fn)
+ process = subprocess.Popen(cmd, stdout=subprocess.PIPE)
+ output, error = process.communicate()
+
+ print("________")
+
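+ # upload the finished video to the remote host and print its public URL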
+ cmd = ("scp", render_dir + video_fn, "jules@asdf.us:asdf/neural/")
+ process = subprocess.Popen(cmd, stdout=subprocess.PIPE)
+ output, error = process.communicate()
+
+ print("https://asdf.us/neural/" + video_fn)