import path from 'path'
import fs from 'fs'

// Module identity; also names the default checkout directory under ~/code/.
const name = 'pix2pixhd'

// Working directory of the pix2pixHD checkout. Overridable via PIX2PIXHD_CWD.
// NOTE(review): process.env.HOME may be undefined (e.g. on Windows), in which
// case path.join throws at import time — confirm deployment target.
const cwd = process.env.PIX2PIXHD_CWD || path.join(process.env.HOME, `code/${name}/`)

// Extra environment for spawned workers (CUDA + TensorRT shared libraries).
const env = {
  LD_LIBRARY_PATH: `/usr/local/cuda/lib64:${process.env.TENSORRT_LIB_PATH}`,
}

// Fetch a source clip (via get.pl) and create a dataset from it.
const fetch = {
  type: 'perl',
  script: 'get.pl',
  params: (task) => {
    console.log(task)
    return [task.opt.url]
  },
  // Relay the new dataset name announced on stdout by the fetch script.
  listen: (task, res, i) => {
    const lines = res.split('\n')
    for (const line of lines) {
      if (line.match(/^created dataset: /)) {
        const tag = line.split(': ')[1].trim()
        task.dataset = tag
        console.log(">>>>>> created dataset", tag)
        return { type: 'progress', action: 'resolve_dataset', task }
      }
    }
    return null
  },
  after: 'build',
}

// Build the training dataset from the fetched frames.
const build = {
  type: 'perl',
  script: 'build_dataset.pl',
  params: (task) => {
    return [task.dataset]
  },
}

// Train (or resume training) a pix2pixHD model on the dataset.
const train = {
  type: 'pytorch',
  script: 'train.py',
  params: (task) => {
    let epoch = 0
    const dataset = task.dataset.toLowerCase()
    const datasets_path = path.join(cwd, 'datasets', dataset)
    const checkpoints_path = path.join(cwd, 'checkpoints', dataset)
    const iter_txt = path.join(checkpoints_path, 'iter.txt')
    const checkpoint_path = path.join(checkpoints_path, 'latest_net_G.pth')
    console.log(dataset, iter_txt)
    // Resume only when both the iteration marker and a saved generator exist.
    if (fs.existsSync(iter_txt) && fs.existsSync(checkpoint_path)) {
      const iter = fs.readFileSync(iter_txt).toString().split('\n');
      console.log(iter)
      // First line of iter.txt is the last completed epoch (string; falsy → 0).
      epoch = iter[0] || 0
      console.log(task.module, dataset, '=>', epoch, task.epochs)
    } else {
      console.log(task.module, dataset, '=>', 'starting new training')
    }
    let args = [
      '--dataroot', datasets_path,
      '--module_name', task.module,
      '--name', dataset,
      '--model', 'pix2pixHD',
      '--label_nc', 0,
      '--no_instance',
      '--niter', task.epochs || 1,
      '--niter_decay', 0,
      '--save_epoch_freq', 1,
    ]
    if (epoch) {
      // Continue from the latest saved checkpoint.
      args = args.concat([
        '--which_epoch', 'latest',
        '--continue_train',
      ])
    }
    return args
  },
  listen: (task, res, i) => {
    console.log(res)
    return null
  },
}

// Run inference (test.py) over the dataset with the trained model.
const generate = {
  type: 'pytorch',
  script: 'test.py',
  params: (task) => {
    let epoch = 0
    const dataset = task.dataset.toLowerCase()
    const datasets_path = path.join(cwd, 'datasets', dataset)
    const checkpoints_path = path.join(cwd, 'checkpoints', dataset)
    const iter_txt = path.join(checkpoints_path, 'iter.txt')
    console.log(dataset, iter_txt)
    if (fs.existsSync(iter_txt)) {
      const iter = fs.readFileSync(iter_txt).toString().split('\n');
      console.log(iter)
      epoch = iter[0] || 0
      console.log(task.module, dataset, '=>', epoch, task.epochs)
    } else {
      console.log(task.module, dataset, '=>', 'starting new training')
    }
    // NOTE(review): unlike train, '--niter' here may be undefined when
    // task.epochs is unset — confirm test.py tolerates that.
    return [
      '--dataroot', datasets_path,
      '--module_name', task.module,
      '--name', dataset,
      '--model', 'pix2pixHD',
      '--label_nc', 0,
      '--no_instance',
      '--niter', task.epochs,
      '--niter_decay', 0,
      '--save_epoch_freq', 1,
    ]
  },
  after: 'render',
}

// Recursive augmentation pass: feed model output back in as input.
const augment = {
  type: 'pytorch',
  script: 'augment.py',
  params: (task) => {
    const dataset = task.dataset.toLowerCase()
    const datasets_path = path.join(cwd, 'datasets', dataset)
    // supply render_dir
    const args = [
      '--dataroot', datasets_path,
      '--results_dir', './recursive',
      '--module_name', task.module,
      '--name', dataset,
      '--sequence-name', dataset,
      '--model', 'pix2pixHD',
      '--label_nc', 0,
      '--no_instance',
      '--augment-take', task.opt.augment_take,
      '--augment-make', task.opt.augment_make,
      '--which_epoch', task.opt.epoch || "latest",
    ]
    if (task.opt.augment_name) {
      args.push('--augment-name', task.opt.augment_name)
    }
    if (task.opt.no_symlinks) {
      args.push('--no-symlinks')
    }
    return args
  },
  // Extract the render directory path announced by augment.py.
  listen: (task, res, i) => {
    const lines = res.split('\n')
    for (const line of lines) {
      console.log(line)
      if (line.match(/^render_dir: /)) {
        const tag = line.split(': ')[1].trim()
        task.opt.render_dir = tag
        console.log(">>>>>> created dataset", tag)
        return { type: 'progress', action: 'resolve_dataset', task }
      }
    }
    return null
  },
  after: 'render_recursive',
}

// Live recursive generation driven by a seed frame sequence.
const live = {
  type: 'pytorch',
  script: 'live.py',
  params: (task) => {
    console.log(task)
    const opt = task.opt || {}
    return [
      '--phase', 'recursive',
      '--dataroot', path.join(cwd, 'sequences', task.dataset),
      '--start_img', path.join(cwd, 'sequences', task.dataset, 'frame_00001.png'),
      '--checkpoint-name', task.checkpoint,
      '--experiment', task.checkpoint,
      '--name', task.checkpoint,
      '--module_name', 'pix2pixHD',
      '--sequence-name', task.dataset,
      '--sequence',
      '--sequence-frac', 0.3,
      '--process-frac', 0.5,
      '--label_nc', '0',
      '--no_instance',
      '--how_many', 10000,
      '--transition-period', 1000,
      '--just-copy',
      '--poll_delay', opt.poll_delay || 0.09,
      '--which_epoch', 'latest',
      '--norm', 'batch',
      '--store_b', // stores all live output (remove this flag to disable)
    ]
  },
  // Relay the output recording name announced by live.py.
  listen: (task, res, i) => {
    const lines = res.split('\n')
    for (const line of lines) {
      if (line.match(/^final result: /)) {
        const tag = line.split(': ')[1].trim()
        task.dataset = tag
        console.log(">>>>>> recording live to", tag)
        return { type: 'progress', action: 'resolve_dataset', task }
      }
    }
    return null
  },
  after: 'render',
}

// Encode the results directory into a movie and upload it.
const render = {
  type: 'perl',
  script: 'dir-to-movie.pl',
  params: (task) => {
    return [
      '--path', 'results',
      '--tag', task.dataset,
      '--module', task.module,
      '--endpoint', `${process.env.API_REMOTE}/api/folder/${task.opt.folder_id}/upload/`,
    ]
  },
}

// Encode a recursive-render directory into a movie and upload it.
const render_recursive = {
  type: 'perl',
  script: 'dir-to-movie.pl',
  params: (task) => {
    if (!task.opt.mov) {
      console.log('will not render mov')
      return 'CANCEL'
    }
    if (!task.opt.render_dir) {
      return 'CANCEL';
    }
    // render_dir comes from augment.listen as './recursive/<tag>'; strip prefix.
    const render_dir = task.opt.render_dir.replace('./recursive/', '')
    console.log('rendering recursive path:', render_dir)
    return [
      '--path', 'recursive',
      '--tag', render_dir,
      '--module', task.module,
      '--endpoint', `${process.env.API_REMOTE}/api/folder/${task.opt.folder_id}/upload/`,
      '--prefix', 'recur',
    ]
  },
}

// Cut a frame range out of a sequence, then hand off to training.
const splice = {
  type: 'perl',
  script: 'splice.pl',
  params: (task) => {
    console.log(task.opt.selection)
    return [
      '--dataset', task.dataset,
      '--sequence', task.opt.sequence,
      '--start_frame', task.opt.selection.start.i,
      '--end_frame', task.opt.selection.end.i,
      '--module', task.module,
      '--folder_id', task.opt.folder_id,
      '--endpoint', `${process.env.API_REMOTE}/api/file/`,
    ]
  },
  after: 'train',
}

// Upscale a finished result or recursive render.
const uprez = {
  type: 'pytorch',
  script: 'uprez.py',
  params: (task) => {
    console.log(task)
    // Set folder based on whether the file is a result or recursive mp4.
    // NOTE(review): folder stays undefined when neither flag is set —
    // confirm uprez.py rejects that cleanly.
    let folder
    if (task.opt.is_result) {
      folder = `./results/${task.dataset}`
    } else if (task.opt.is_recursive) {
      folder = `./recursive/${task.dataset}/${task.opt.name}/`
    }
    return [
      '--folder', folder,
      '--folder_id', task.opt.folder_id,
      '--dataset', task.dataset,
      '--out_fn', `${task.dataset}_${Date.now()}`,
    ]
  },
}

/**
 * Zero-pad a number to `size` digits.
 * Non-numeric input is returned untouched.
 * @param {number|string} num
 * @param {number} size - minimum width of the result
 * @returns {number|string} padded string, or `num` unchanged if not numeric
 */
function pad(num, size) {
  if (Number.isNaN(parseInt(num, 10))) return num;
  return String(num).padStart(size, '0');
}

export default {
  name,
  cwd,
  env,
  activities: {
    fetch,
    build,
    train,
    generate,
    augment,
    live,
    render,
    render_recursive,
    splice,
    uprez,
  },
}