import uuidv1 from 'uuid/v1'
import socket from '../../socket'
import types from '../../types'
import * as datasetLoader from '../../dataset/dataset.loader'
import actions from '../../actions'
import util from '../../util'
import bigganModule from './biggan.module'

// Scans the biggan module's directories on load: restores the persisted
// dataset state, then lists the sequences, datasets, and checkpoints
// directories, reporting progress to the app as each call settles. The
// results are merged into the dataset lookup and dispatched as a single
// load action; if a folder id was passed in, that folder is selected once
// the load completes.
export const load_directories = (id) => (dispatch) => {
  const module = bigganModule.name
  util.allProgress([
    datasetLoader.load(module),
    actions.socket.list_directory({ module, dir: 'sequences' }),
    actions.socket.list_directory({ module, dir: 'datasets' }),
    actions.socket.list_directory({ module, dir: 'checkpoints' }),
    // actions.socket.disk_usage({ module, dir: 'datasets' }),
  ], (percent, i, n) => {
    console.log('biggan load progress', i, n)
    dispatch({
      type: types.app.load_progress,
      progress: { i, n },
      data: { module: 'biggan' },
    })
  }).then(res => {
    const [datasetApiReport, sequences, datasets, checkpoints] = res //, datasets, results, output, datasetUsage, lossReport] = res
    const {
      folderLookup,
      fileLookup,
      datasetLookup,
      folders,
      files,
      unsortedFolder,
      resultsFolder,
    } = datasetApiReport

    // A sequence directory means the dataset has been built.
    const sequenceDirectories = sequences.filter(s => s.dir)
    sequenceDirectories.forEach(dir => {
      const dataset = datasetLoader.getDataset(module, datasetLookup, dir.name)
      dataset.isBuilt = true
    })

    // A dataset directory means the raw dataset is present on disk.
    datasets.filter(s => s.dir).forEach(dir => {
      const dataset = datasetLoader.getDataset(module, datasetLookup, dir.name)
      dataset.hasDataset = true
    })

    // A checkpoint directory means training has produced checkpoints.
    const checkpointDirectories = checkpoints.filter(s => s.dir)
    checkpointDirectories.forEach(dir => {
      const dataset = datasetLoader.getDataset(module, datasetLookup, dir.name)
      dataset.hasCheckpoints = true
      dataset.checkpoints = [dir]
    })

    // flatDatasets.forEach(file => {
    //   file.uuid = uuidv1()
    //   fileLookup[file.uuid] = file
    //   const name = file.name.split('.')[0]
    //   const dataset = datasetLoader.getDataset(module, datasetLookup, name, unsortedFolder, file.date)
    //   file.persisted = false
    //   dataset.input.push(file.uuid)
    // })

    // Checkpoint directory names encode their hyperparameters, e.g.
    // exp:coccokit_3-frame_sizes:8,2-n_rnn:2-dataset:coccokit_3
    // const checkpoints = results.filter(s => s.dir).map(s => {
    //   const checkpoint = s.name
    //     .split('-')
    //     .map(s => s.split(':'))
    //     .filter(b => b.length && b[1])
    //     .reduce((a, b) => (a[b[0]] = b[1]) && a, {})
    //   checkpoint.name = checkpoint.name || checkpoint.dataset || checkpoint.exp
    //   checkpoint.date = s.date
    //   checkpoint.dir = s
    //   checkpoint.persisted = false
    //   const dataset = datasetLoader.getDataset(module, datasetLookup, checkpoint.name, unsortedFolder, checkpoint.date)
    //   const loss = lossReport[checkpoint.name]
    //   if (loss) {
    //     dataset.epoch = checkpoint.epoch = loss.length
    //     checkpoint.training_loss = loss
    //   }
    //   dataset.checkpoints.push(checkpoint)
    //   return checkpoint
    // })

    // output.map(file => {
    //   file.uuid = uuidv1()
    //   fileLookup[file.uuid] = file
    //   const pair = file.name.split('.')[0].split('-')
    //   const dataset = datasetLoader.getDataset(module, datasetLookup, pair[0], unsortedFolder, file.date)
    //   file.persisted = false
    //   file.epoch = parseInt(file.epoch || pair[1].replace(/^\D+/, '')) || 0
    //   dataset.epoch = Math.max(file.epoch, dataset.epoch || 0)
    //   // if the file already exists in the dataset, just mark it persisted
    //   const found = dataset.output.some(file_id => {
    //     if (fileLookup[file_id].name === file.name) {
    //       fileLookup[file_id].persisted = true
    //       return true
    //     }
    //     return false
    //   })
    //   if (!found) {
    //     dataset.output.push(file.uuid)
    //   }
    // })

    dispatch({
      type: types.dataset.load,
      data: {
        module,
        folderLookup,
        fileLookup,
        datasetLookup,
        folders,
        files,
        sequences: sequenceDirectories,
        datasets,
        checkpoints: checkpointDirectories,
        resultsFolder,
      },
    })

    // Select the requested folder only after the load has completed.
    if (id) {
      console.log('folder id', id)
      dispatch({
        type: types.dataset.set_folder,
        data: { folder_id: id, module },
      })
    }
  }).catch(e => {
    console.error(e)
  })
}

// Loads the generated results for the biggan module: the 'results' folder
// record plus up to 250 generated files.
export const load_results = (id) => (dispatch) => {
  const module = bigganModule.name
  util.allProgress([
    actions.folder.index({ module, name: 'results' }),
    actions.file.index({ module, generated: 1, limit: 250 }),
    // actions.socket.list_directory({ module, dir: 'renders' }),
    // actions.socket.list_sequences({ module, dir: 'results' }),
  ], (percent, i, n) => {
    console.log('biggan load progress', i, n)
    dispatch({
      type: types.app.load_progress,
      progress: { i, n },
      data: { module: 'biggan' },
    })
  }).then(res => {
    const [folders, files] = res
    dispatch({
      type: types.biggan.load_results,
      results: {
        resultsFolder: folders[0],
        files,
      },
    })
  })
}

// Loads generated image files and groups them by folder name into an
// encodings map; folders with no encodings are dropped before dispatching.
export const load_encodings = () => dispatch => {
  const module = bigganModule.name
  util.allProgress([
    actions.folder.index({ module }),
    actions.file.index({
      module,
      datatype: 'image',
      generated: 1,
    }),
  ], (percent, i, n) => {
    // progress is not surfaced for this load
  }).then(res => {
    const [folders, files] = res

    // Map folder ids to names and seed an empty bucket per folder.
    const folder_name_lookup = {}
    const encodings = {}
    folders.forEach(folder => {
      folder_name_lookup[folder.id] = folder.name
      encodings[folder.name] = []
    })

    // Bucket each file under its folder's name.
    files.forEach(file => {
      const folder_name = folder_name_lookup[file.folder_id]
      encodings[folder_name].push(file)
    })

    // Drop folders that contain no encodings.
    folders.forEach(folder => {
      if (!encodings[folder.name].length) {
        delete encodings[folder.name]
      }
    })

    dispatch({
      type: types.biggan.load_encodings,
      encodings,
    })
  })
}
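
// For reference: each thunk above funnels its async calls through
// util.allProgress. The real implementation lives in '../../util'; the
// sketch below is only an assumption inferred from how it is called in
// this file (an array of promises plus a (percent, i, n) callback,
// resolving like Promise.all while reporting progress per settled promise).
//
// const allProgress = (promises, onProgress) => {
//   let settled = 0
//   promises.forEach(p =>
//     Promise.resolve(p).then(() => {
//       settled += 1
//       onProgress((settled / promises.length) * 100, settled, promises.length)
//     })
//   )
//   return Promise.all(promises)
// }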