| field | value | date |
|---|---|---|
| author | Jules Laplace <julescarbon@gmail.com> | 2020-01-03 19:06:19 +0100 |
| committer | Jules Laplace <julescarbon@gmail.com> | 2020-01-03 19:06:19 +0100 |
| commit | bdfc18141fea55568227de48e9b6cb61538ea6ce (patch) | |
| tree | e10ea5b6bee8a68c7de379ff6adfdb6852144cfb /app/client/modules/biggan/biggan.actions.js | |
| parent | ee2471bb4ad8e190c1f2c1c47817046a2c4d6b30 (diff) | |
biggan new, results js
Diffstat (limited to 'app/client/modules/biggan/biggan.actions.js')
| mode | path | lines |
|---|---|---|
| -rw-r--r-- | app/client/modules/biggan/biggan.actions.js | 168 |

1 file changed, 168 insertions, 0 deletions
```diff
diff --git a/app/client/modules/biggan/biggan.actions.js b/app/client/modules/biggan/biggan.actions.js
index 188a0d8..df78481 100644
--- a/app/client/modules/biggan/biggan.actions.js
+++ b/app/client/modules/biggan/biggan.actions.js
@@ -10,3 +10,171 @@
 import actions from '../../actions'
 import util from '../../util'
 import bigganModule from './biggan.module'
+
+export const load_directories = (id) => (dispatch) => {
+  const module = bigganModule.name
+  util.allProgress([
+    datasetLoader.load(module),
+    actions.socket.list_directory({ module, dir: 'sequences' }),
+    actions.socket.list_directory({ module, dir: 'datasets' }),
+    actions.socket.list_directory({ module, dir: 'checkpoints' }),
+    // actions.socket.disk_usage({ module, dir: 'datasets' }),
+  ], (percent, i, n) => {
+    console.log('biggan load progress', i, n)
+    dispatch({
+      type: types.app.load_progress,
+      progress: { i, n },
+      data: { module: 'biggan' },
+    })
+  }).then(res => {
+    const [datasetApiReport, sequences, datasets, checkpoints] = res //, datasets, results, output, datasetUsage, lossReport] = res
+    const {
+      folderLookup,
+      fileLookup,
+      datasetLookup,
+      folders,
+      files,
+      unsortedFolder,
+      resultsFolder,
+    } = datasetApiReport
+    // console.log(datasetUsage)
+
+    const sequenceDirectories = sequences.filter(s => s.dir)
+    // console.log(sequenceDirectories)
+    sequenceDirectories.forEach(dir => {
+      const dataset = datasetLoader.getDataset(module, datasetLookup, dir.name)
+      dataset.isBuilt = true
+      // console.log(dir.name, dataset)
+    })
+
+    datasets.filter(s => s.dir).forEach(dir => {
+      const dataset = datasetLoader.getDataset(module, datasetLookup, dir.name)
+      dataset.hasDataset = true
+    })
+
+    const checkpointDirectories = checkpoints.filter(s => s.dir)
+    checkpointDirectories.forEach(dir => {
+      const dataset = datasetLoader.getDataset(module, datasetLookup, dir.name)
+      dataset.hasCheckpoints = true
+      dataset.checkpoints = [dir]
+    })
+
+    // console.log(res)
+
+    // flatDatasets.forEach(file => {
+    //   file.uuid = uuidv1()
+    //   fileLookup[file.uuid] = file
+    //   const name = file.name.split('.')[0]
+    //   const dataset = datasetLoader.getDataset(module, datasetLookup, name, unsortedFolder, file.date)
+    //   file.persisted = false
+    //   dataset.input.push(file.uuid)
+    // })
+
+    // // exp:coccokit_3-frame_sizes:8,2-n_rnn:2-dataset:coccokit_3
+    // const checkpoints = results.filter(s => s.dir).map(s => {
+    //   const checkpoint = s.name
+    //     .split('-')
+    //     .map(s => s.split(':'))
+    //     .filter(b => b.length && b[1])
+    //     .reduce((a,b) => (a[b[0]] = b[1]) && a, {})
+    //   checkpoint.name = checkpoint.name || checkpoint.dataset || checkpoint.exp
+    //   checkpoint.date = s.date
+    //   checkpoint.dir = s
+    //   checkpoint.persisted = false
+    //   const dataset = datasetLoader.getDataset(module, datasetLookup, checkpoint.name, unsortedFolder, checkpoint.date)
+    //   const loss = lossReport[checkpoint.name]
+    //   if (loss) {
+    //     dataset.epoch = checkpoint.epoch = loss.length
+    //     checkpoint.training_loss = loss
+    //   }
+    //   dataset.checkpoints.push(checkpoint)
+    //   return checkpoint
+    // })
+
+    // output.map(file => {
+    //   file.uuid = uuidv1()
+    //   fileLookup[file.uuid] = file
+    //   const pair = file.name.split('.')[0].split('-')
+    //   const dataset = datasetLoader.getDataset(module, datasetLookup, pair[0], unsortedFolder, file.date)
+    //   file.persisted = false
+    //   file.epoch = parseInt(file.epoch || pair[1].replace(/^\D+/, '')) || 0
+    //   dataset.epoch = Math.max(file.epoch, dataset.epoch || 0)
+    //   // here check if the file exists in dataset, if so just check that it's persisted
+    //   const found = dataset.output.some(file_id => {
+    //     // if (f.name ===
+    //     if (fileLookup[file_id].name === file.name) {
+    //       fileLookup[file_id].persisted = true
+    //       return true
+    //     }
+    //     return false
+    //   })
+    //   if (! found) {
+    //     dataset.output.push(file.uuid)
+    //   }
+    // })
+
+    dispatch({
+      type: types.dataset.load,
+      data: {
+        module,
+        folderLookup,
+        fileLookup,
+        datasetLookup,
+        folders, files,
+        sequences: sequenceDirectories,
+        datasets,
+        checkpoints: checkpointDirectories,
+        resultsFolder,
+      },
+    })
+    if (id) {
+      console.log('folder id', id)
+      dispatch({
+        type: types.dataset.set_folder,
+        data: {
+          folder_id: id,
+          module
+        },
+      })
+    }
+  }).catch(e => {
+    console.error(e)
+  })
+  if (id) {
+    console.log('folder id', id)
+    dispatch({
+      type: types.dataset.set_folder,
+      data: {
+        folder_id: id,
+        module
+      },
+    })
+  }
+}
+
+export const load_results = (id) => (dispatch) => {
+  const module = bigganModule.name
+  util.allProgress([
+    actions.folder.index({ name: 'results' }),
+    actions.file.index({ module, generated: 1, limit: 250 }),
+    // actions.socket.list_directory({ module, dir: 'renders' }),
+    // actions.socket.list_sequences({ module, dir: 'results' }),
+  ], (percent, i, n) => {
+    console.log('biggan load progress', i, n)
+    dispatch({
+      type: types.app.load_progress,
+      progress: { i, n },
+      data: { module: 'biggan' },
+    })
+  }).then(res => {
+    const [folders, files] = res //, datasets, results, output, datasetUsage, lossReport] = res
+    // console.log(files, results, renders)
+    dispatch({
+      type: types.biggan.load_results,
+      results: {
+        resultsFolder: folders[0],
+        files,
+      }
+    })
+  })
+}
```
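For context, both exports are redux-thunk-style action creators: calling `load_directories(id)` or `load_results()` returns a function that receives `dispatch`, and `util.allProgress` appears to wrap the listed promises with a per-item progress callback `(percent, i, n)`. The sketch below shows how these thunks might be dispatched; the store creation, reducer path, and the folder id are assumptions for illustration and are not part of this commit.

```js
// Hypothetical wiring — assumes the app builds its store with redux-thunk,
// which is implied by the (id) => (dispatch) => { ... } shape in the diff above.
import { createStore, applyMiddleware } from 'redux'
import thunk from 'redux-thunk'

import rootReducer from '../../reducers' // assumed path, for illustration only
import { load_directories, load_results } from './biggan.actions'

const store = createStore(rootReducer, applyMiddleware(thunk))

// Kick off the directory scan: progress is reported via types.app.load_progress
// dispatches from the util.allProgress callback, and the combined payload
// arrives as a single types.dataset.load action once every listing resolves.
store.dispatch(load_directories('some-folder-id')) // folder id is illustrative

// Fetch previously generated results (the 'results' folder plus up to 250
// generated files); lands in the store as a types.biggan.load_results action.
store.dispatch(load_results())
```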
