diff options
| author | Jules Laplace <julescarbon@gmail.com> | 2018-06-03 02:25:34 +0200 |
|---|---|---|
| committer | Jules Laplace <julescarbon@gmail.com> | 2018-06-03 02:25:34 +0200 |
| commit | 46fbedf0ad7b167a28daf2030e06f34480576394 (patch) | |
| tree | 81877c97f777a8d705d47bb97ac96e5fa244bd2b /app/client/modules/samplernn/samplernn.actions.js | |
| parent | 2149eb581c35a93d41dbad6e3409c498b4bed804 (diff) | |
add results page
Diffstat (limited to 'app/client/modules/samplernn/samplernn.actions.js')
| -rw-r--r-- | app/client/modules/samplernn/samplernn.actions.js | 109 |
1 file changed, 49 insertions, 60 deletions
diff --git a/app/client/modules/samplernn/samplernn.actions.js b/app/client/modules/samplernn/samplernn.actions.js index 1a60719..e8f8251 100644 --- a/app/client/modules/samplernn/samplernn.actions.js +++ b/app/client/modules/samplernn/samplernn.actions.js @@ -12,16 +12,38 @@ export const load_directories = (id) => (dispatch) => { actions.socket.list_directory({ module: 'samplernn', dir: 'datasets' }), actions.socket.list_directory({ module: 'samplernn', dir: 'results' }), actions.socket.list_directory({ module: 'samplernn', dir: 'output' }), + load_loss()(dispatch), ]).then(res => { // console.log(res) - const [folders, files, tasks, datasets, results, output] = res + const [folders, files, tasks, datasets, results, output, lossReport] = res - const empty_dataset = (name) => ({ - name, - input: [], - checkpoints: [], - output: [], - }) + const unsortedFolder = { + id: 0, + name: 'unsorted', + datasets: [], + } + + const datasetLookup = {} + + const get_dataset = (name, folder=unsortedFolder, date) => { + const dataset = datasetLookup[name] || empty_dataset(name, folder) + if (date) { + dataset.date = dataset.date ? 
Math.max(+new Date(date), dataset.date) : +new Date(date) + } + return dataset + } + + const empty_dataset = (name, folder=unsortedFolder) => { + const dataset = { + name, + input: [], + checkpoints: [], + output: [], + } + datasetLookup[dataset.name] = dataset + folder.datasets.push(dataset) + return dataset + } // take all of the folders and put them in a lookup const folderLookup = folders.reduce((folderLookup, folder) => { @@ -29,11 +51,7 @@ export const load_directories = (id) => (dispatch) => { folder.datasets = [] return folderLookup }, { - 'unsorted': { - id: 0, - name: 'unsorted', - datasets: [], - } + unsorted: unsortedFolder }) // prepare the files by splitting into two groups @@ -41,61 +59,34 @@ export const load_directories = (id) => (dispatch) => { const ungeneratedFiles = files.filter(file => !file.generated) // build the initial dataset lookup table using the ungenerated files - const datasetLookup = ungeneratedFiles.reduce((datasetLookup, file) => { - file.checkpoints = [] + ungeneratedFiles.reduce((datasetLookup, file) => { if (! file.name) { file.name = (file.opt || {}).token || file.url - // datasetLookup[] = file } const name = (file.name || 'unsorted').split('.')[0] - if (! 
datasetLookup[name]) { - const dataset = empty_dataset(name) - datasetLookup[dataset.name] = dataset - dataset.input.push(file) - folderLookup[file.folder_id].datasets.push(dataset) - } - else { - datasetLookup[name].input.push(file) - } + const dataset = get_dataset(name, folderLookup[file.folder_id], unsortedFolder, file.date) + dataset.input.push(file) return datasetLookup - }, { - unsorted: empty_dataset('unsorted') - }) + }, datasetLookup) // go over the generated files and add addl datasets (if the files were deleted) generatedFiles.map(file => { const pair = file.name.split('.')[0].split('-') - let dataset = datasetLookup[pair[0]] - if (!dataset) { - dataset = empty_dataset(pair[0]) - datasetLookup[dataset.name] = dataset - folderLookup[file.folder_id].datasets.push(dataset) - } + const dataset = get_dataset(pair[0], folderLookup[file.folder_id], unsortedFolder, file.date) dataset.output.push(file) file.epoch = file.epoch || pair[1] }) - // console.log(datasets) const flatDatasets = datasets.filter(s => s.name.match(/(wav|aiff?|flac|mp3)$/) && !s.dir) const builtDatasets = datasets.filter(s => s.dir) builtDatasets.forEach(dir => { - let dataset = datasetLookup[dir.name] - if (! dataset) { - dataset = empty_dataset(dir.name) - datasetLookup[dataset.name] = dataset - folderLookup.unsorted.datasets.push(dataset) - } + const dataset = get_dataset(dir.name) dataset.isBuilt = true }) flatDatasets.forEach(file => { const name = file.name.split('.')[0] - let dataset = datasetLookup[name] - if (! 
dataset) { - dataset = empty_dataset(name) - datasetLookup[dataset.name] = dataset - folderLookup.unsorted.datasets.push(dataset) - } + const dataset = get_dataset(name, unsortedFolder, file.date) file.persisted = false dataset.input.push(file) }) @@ -107,14 +98,15 @@ export const load_directories = (id) => (dispatch) => { .map(s => s.split(':')) .filter(b => b.length && b[1]) .reduce((a,b) => (a[b[0]] = b[1]) && a, {}) - checkpoint.name = checkpoint.dataset || checkpoint.exp + checkpoint.name = checkpoint.name || checkpoint.dataset || checkpoint.exp checkpoint.date = s.date checkpoint.dir = s - let dataset = datasetLookup[checkpoint.dataset] - if (! dataset) { - dataset = empty_dataset(checkpoint.dataset) - datasetLookup[dataset.name] = dataset - folderLookup.unsorted.datasets.push(dataset) + checkpoint.persisted = false + const dataset = get_dataset(checkpoint.name, unsortedFolder, checkpoint.date) + const loss = lossReport[checkpoint.name] + if (loss) { + dataset.epoch = checkpoint.epoch = loss.length + checkpoint.training_loss = loss } dataset.checkpoints.push(checkpoint) return checkpoint @@ -122,14 +114,10 @@ export const load_directories = (id) => (dispatch) => { output.map(file => { const pair = file.name.split('.')[0].split('-') - let dataset = datasetLookup[pair[0]] - if (!dataset) { - dataset = empty_dataset(pair[0]) - datasetLookup[dataset.name] = dataset - folderLookup.unsorted.datasets.push(dataset) - } + const dataset = get_dataset(pair[0], unsortedFolder, file.date) file.persisted = false - file.epoch = file.epoch || pair[1].replace(/^\D+/, '') + file.epoch = parseInt(file.epoch || pair[1].replace(/^\D+/, '')) || 0 + dataset.epoch = Math.max(file.epoch, dataset.epoch || 0) dataset.output.push(file) }) @@ -156,7 +144,7 @@ export const load_directories = (id) => (dispatch) => { } export const load_loss = () => dispatch => { - actions.socket.run_script({ module: 'samplernn', activity: 'report' }) + return actions.socket.run_script({ module: 
'samplernn', activity: 'report' }) .then(report => { const lossReport = {} report.stdout.split('\n\n').filter(a=>!!a).forEach(data => { @@ -175,6 +163,7 @@ export const load_loss = () => dispatch => { type: types.samplernn.load_loss, lossReport }) + return lossReport }) } |
