From cf6f1c58a7298b0af1c13bb701032017563a6ed8 Mon Sep 17 00:00:00 2001
From: Jules Laplace
Date: Sat, 2 Jun 2018 22:42:05 +0200
Subject: basic dataset viewer with stuff clustered how i want it

---
 app/client/modules/samplernn/samplernn.actions.js | 42 +++++++++++++----------
 1 file changed, 24 insertions(+), 18 deletions(-)

(limited to 'app/client/modules/samplernn/samplernn.actions.js')

diff --git a/app/client/modules/samplernn/samplernn.actions.js b/app/client/modules/samplernn/samplernn.actions.js
index b69da2e..cb8b47b 100644
--- a/app/client/modules/samplernn/samplernn.actions.js
+++ b/app/client/modules/samplernn/samplernn.actions.js
@@ -18,14 +18,14 @@ export const load_directories = (id) => (dispatch) => {
 
     const empty_dataset = (name) => ({
       name,
-      input: null,
+      input: [],
       checkpoints: [],
-      results: [],
+      output: [],
     })
 
     // take all of the folders and put them in a lookup
     const folderLookup = folders.reduce((folderLookup, folder) => {
-      folderLookup[folder.id] = folder
+      folderLookup[folder.id] = { id: folder.id, name: folder.name, folder, datasets: [] }
       folder.datasets = []
       return folderLookup
     }, {
@@ -37,27 +37,33 @@
     })
 
     // prepare the files by splitting into two groups
-    const processedFiles = files.filter(file => file.processed)
-    const unprocessedFiles = files.filter(file => !file.processed)
+    const generatedFiles = files.filter(file => file.generated)
+    const ungeneratedFiles = files.filter(file => !file.generated)
 
-    // build the initial dataset lookup table using the unprocessed files
-    const datasetLookup = unprocessedFiles.reduce((datasetLookup, file) => {
+    // build the initial dataset lookup table using the ungenerated files
+    const datasetLookup = ungeneratedFiles.reduce((datasetLookup, file) => {
       file.checkpoints = []
       if (! file.name) {
         file.name = (file.opt || {}).token || file.url
-        // datasetLookup[(file.name || 'unsorted').split('.')[0]] = file
+        // datasetLookup[] = file
+      }
+      const name = (file.name || 'unsorted').split('.')[0]
+      if (! datasetLookup[name]) {
+        const dataset = empty_dataset(name)
+        datasetLookup[dataset.name] = dataset
+        dataset.input.push(file)
+        folderLookup[file.folder_id].datasets.push(dataset)
+      }
+      else {
+        datasetLookup[name].input.push(file)
       }
-      const dataset = empty_dataset((file.name).split('.')[0])
-      datasetLookup[dataset.name] = dataset
-      dataset.input = input
-      folderLookup[file.folder_id].datasets.push(dataset)
       return datasetLookup
     }, {
       unsorted: empty_dataset('unsorted')
     })
 
-    // go over the processed files and add addl datasets (if the files were deleted)
-    processedFiles.map(file => {
+    // go over the generated files and add addl datasets (if the files were deleted)
+    generatedFiles.map(file => {
       const pair = file.name.split('.')[0].split('-')
       let dataset = datasetLookup[pair[0]]
       if (!dataset) {
@@ -65,7 +71,7 @@ export const load_directories = (id) => (dispatch) => {
         datasetLookup[dataset.name] = dataset
         folderLookup[file.folder_id].datasets.push(dataset)
       }
-      dataset.results.push(file)
+      dataset.output.push(file)
       file.epoch = file.epoch || pair[1]
     })
 
@@ -89,7 +95,6 @@ export const load_directories = (id) => (dispatch) => {
         .map(s => s.split(':'))
         .filter(b => b.length && b[1])
         .reduce((a,b) => (a[b[0]] = b[1]) && a, {})
-      // console.log(checkpoint.dataset)
       checkpoint.name = checkpoint.dataset || checkpoint.exp
       checkpoint.dir = s
       let dataset = datasetLookup[checkpoint.dataset]
@@ -110,7 +115,7 @@ export const load_directories = (id) => (dispatch) => {
         datasetLookup[dataset.name] = dataset
         folderLookup.unsorted.datasets.push(dataset)
       }
-      dataset.results.push(file)
+      dataset.output.push(file)
       file.epoch = file.epoch || pair[1]
     })
 
@@ -125,9 +130,10 @@ export const load_directories = (id) => (dispatch) => {
       },
     })
     if (id) {
+      let folder = id === 'unsorted' ? folderLookup.unsorted : folderLookup[id]
       dispatch({
         type: types.samplernn.set_folder,
-        folder: folderLookup[parseInt(id)],
+        folder: folder,
       })
     }
   }).catch(e => {
-- 
cgit v1.2.3-70-g09d2
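
For review context, a minimal standalone sketch (runnable in Node) of the grouping this patch moves to: files are keyed by filename basename into datasets holding input/checkpoints/output lists, and generated files are matched back through the '<dataset>-<epoch>' naming. The sample files are hypothetical; empty_dataset, the generated flag, and the field names are taken from the diff above.

    const empty_dataset = (name) => ({ name, input: [], checkpoints: [], output: [] })

    // Hypothetical sample files, mirroring the fields the diff relies on.
    const files = [
      { name: 'birds.wav',     generated: false },
      { name: 'birds.16k.wav', generated: false }, // same basename -> same dataset
      { name: 'birds-120.wav', generated: true },  // '<dataset>-<epoch>' output file
    ]

    // Ungenerated files seed the lookup, keyed by basename.
    const datasetLookup = files
      .filter(file => !file.generated)
      .reduce((lookup, file) => {
        const name = (file.name || 'unsorted').split('.')[0]
        if (!lookup[name]) lookup[name] = empty_dataset(name)
        lookup[name].input.push(file)
        return lookup
      }, { unsorted: empty_dataset('unsorted') })

    // Generated files attach to an existing dataset, or create one if the
    // inputs were deleted, and recover the epoch from the filename.
    files.filter(file => file.generated).forEach(file => {
      const pair = file.name.split('.')[0].split('-')
      const dataset = datasetLookup[pair[0]] ||
        (datasetLookup[pair[0]] = empty_dataset(pair[0]))
      dataset.output.push(file)
      file.epoch = file.epoch || pair[1]
    })

    console.log(JSON.stringify(datasetLookup.birds, null, 2))

Keying the lookup by basename (rather than the removed one-dataset-per-file assignment, whose dataset.input = input referenced an undefined variable) is what lets several input files cluster under a single dataset.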