Diffstat (limited to 'app/client/modules/samplernn')
-rw-r--r--   app/client/modules/samplernn/index.js                 4
-rw-r--r--   app/client/modules/samplernn/samplernn.actions.js   101
-rw-r--r--   app/client/modules/samplernn/samplernn.datasets.js    6
3 files changed, 74 insertions, 37 deletions
diff --git a/app/client/modules/samplernn/index.js b/app/client/modules/samplernn/index.js
index e8254dd..7795df0 100644
--- a/app/client/modules/samplernn/index.js
+++ b/app/client/modules/samplernn/index.js
@@ -9,8 +9,8 @@ function router () {
     <div>
       <Route exact path='/samplernn/loss/' component={SampleRNNLoss} />
       <Route exact path='/samplernn/inspect/' component={SampleRNNInspect} />
-      <Route exact path='/samplernn/datasets/' component={SampleRNNDatasets} />
-      <Route exact path='/samplernn/dataset/new/' component={SampleRNNDatasets} />
+      <Route exact path='/samplernn/datasets/:id/' component={SampleRNNDatasets} />
+      <Route exact path='/samplernn/datasets/new/' component={SampleRNNDatasets} />
     </div>
   )
 }
diff --git a/app/client/modules/samplernn/samplernn.actions.js b/app/client/modules/samplernn/samplernn.actions.js
index 4b8d00b..b69da2e 100644
--- a/app/client/modules/samplernn/samplernn.actions.js
+++ b/app/client/modules/samplernn/samplernn.actions.js
@@ -3,68 +3,83 @@ import types from '../../types'
 import actions from '../../actions'
-export const load_directories = () => (dispatch) => {
+export const load_directories = (id) => (dispatch) => {
   // console.log(actions)
   Promise.all([
     actions.folder.index({ module: 'samplernn' }),
     actions.file.index({ module: 'samplernn' }),
     actions.task.index({ module: 'samplernn' }),
-    actions.socket.list_directory({ module: 'samplernn', dir: 'results' }),
     actions.socket.list_directory({ module: 'samplernn', dir: 'datasets' }),
+    actions.socket.list_directory({ module: 'samplernn', dir: 'results' }),
+    actions.socket.list_directory({ module: 'samplernn', dir: 'output' }),
   ]).then(res => {
     // console.log(res)
-    const [folders, files, tasks, results, datasets] = res
+    const [folders, files, tasks, datasets, results, output] = res
+
+    const empty_dataset = (name) => ({
+      name,
+      input: null,
+      checkpoints: [],
+      results: [],
+    })
+
     // take all of the folders and put them in a lookup
     const folderLookup = folders.reduce((folderLookup, folder) => {
       folderLookup[folder.id] = folder
-      folder.files = []
-      folder.results = []
+      folder.datasets = []
       return folderLookup
     }, {
      'unsorted': {
        id: 0,
        name: 'unsorted',
-       files: [],
-       results: [],
+       datasets: [],
      }
    })
+    // prepare the files by splitting into two groups
     const processedFiles = files.filter(file => file.processed)
     const unprocessedFiles = files.filter(file => !file.processed)
-    const fileLookup = unprocessedFiles.reduce((fileLookup, file) => {
+
+    // build the initial dataset lookup table using the unprocessed files
+    const datasetLookup = unprocessedFiles.reduce((datasetLookup, file) => {
       file.checkpoints = []
       if (! file.name) {
         file.name = (file.opt || {}).token || file.url
-        // fileLookup[(file.name || 'unsorted').split('.')[0]] = file
-      } else {
-        fileLookup[(file.name).split('.')[0]] = file
+        // datasetLookup[(file.name || 'unsorted').split('.')[0]] = file
       }
-      folderLookup[file.folder_id] && folderLookup[file.folder_id].files.push(file)
-      return fileLookup
+      const dataset = empty_dataset((file.name).split('.')[0])
+      datasetLookup[dataset.name] = dataset
+      dataset.input = input
+      folderLookup[file.folder_id].datasets.push(dataset)
+      return datasetLookup
     }, {
-      unsorted: {
-        checkpoints: [],
-        results: [],
-      }
+      unsorted: empty_dataset('unsorted')
    })
-    processedFiles.map(result => {
-      const pair = result.name.split('.')[0].split('-')
-      const file = fileLookup[pair[0]]
-      if (file) {
-        file.results.push(result)
-      } else {
-        folderLookup[file.folder_id] && folderLookup[file.folder_id].results.push(file)
+
+    // go over the processed files and add addl datasets (if the files were deleted)
+    processedFiles.map(file => {
+      const pair = file.name.split('.')[0].split('-')
+      let dataset = datasetLookup[pair[0]]
+      if (!dataset) {
+        dataset = empty_dataset(pair[0])
+        datasetLookup[dataset.name] = dataset
+        folderLookup[file.folder_id].datasets.push(dataset)
       }
-      result.epoch = result.epoch || pair[1]
+      dataset.results.push(file)
+      file.epoch = file.epoch || pair[1]
     })
-    folderLookup.unsorted.files.push(fileLookup.unsorted)
     // console.log(datasets)
     // const flatDatasets = datasets.filter(s => s.name.match(/(wav|aiff?|flac|mp3)$/) && !s.dir)
     const builtDatasets = datasets.filter(s => s.dir)
-    builtDatasets.forEach(dataset => {
-      const file = fileLookup[dataset.name] || fileLookup.unsorted
-      file.hasDataset = true
+    builtDatasets.forEach(dir => {
+      let dataset = datasetLookup[dir.name]
+      if (! dataset) {
+        dataset = empty_dataset(dir.name)
+        datasetLookup[dataset.name] = dataset
+        folderLookup.unsorted.datasets.push(dataset)
+      }
+      dataset.isBuilt = true
     })
     // exp:coccokit_3-frame_sizes:8,2-n_rnn:2-dataset:coccokit_3
@@ -75,13 +90,30 @@ export const load_directories = () => (dispatch) => {
         .filter(b => b.length && b[1])
         .reduce((a,b) => (a[b[0]] = b[1]) && a, {})
       // console.log(checkpoint.dataset)
-      checkpoint.name = checkpoint.dataset
+      checkpoint.name = checkpoint.dataset || checkpoint.exp
      checkpoint.dir = s
-      const file = fileLookup[checkpoint.dataset] || fileLookup.unsorted
-      file.checkpoints.push(checkpoint)
+      let dataset = datasetLookup[checkpoint.dataset]
+      if (! dataset) {
+        dataset = empty_dataset(checkpoint.dataset)
+        datasetLookup[dataset.name] = dataset
+        folderLookup.unsorted.datasets.push(dataset)
+      }
+      dataset.checkpoints.push(checkpoint)
      return checkpoint
     })
+    output.map(file => {
+      const pair = file.name.split('.')[0].split('-')
+      let dataset = datasetLookup[pair[0]]
+      if (!dataset) {
+        dataset = empty_dataset(pair[0])
+        datasetLookup[dataset.name] = dataset
+        folderLookup.unsorted.datasets.push(dataset)
+      }
+      dataset.results.push(file)
+      file.epoch = file.epoch || pair[1]
+    })
+
     dispatch({
       type: types.samplernn.init,
       data: {
@@ -89,12 +121,13 @@ export const load_directories = () => (dispatch) => {
         folders,
         files,
         checkpoints,
         builtDatasets,
+        output,
       },
     })
-    if (folders.length) {
+    if (id) {
       dispatch({
         type: types.samplernn.set_folder,
-        folder: folders[0],
+        folder: folderLookup[parseInt(id)],
       })
     }
   }).catch(e => {
diff --git a/app/client/modules/samplernn/samplernn.datasets.js b/app/client/modules/samplernn/samplernn.datasets.js
index ac0a667..63c4eaf 100644
--- a/app/client/modules/samplernn/samplernn.datasets.js
+++ b/app/client/modules/samplernn/samplernn.datasets.js
@@ -18,7 +18,11 @@ class SampleRNNDatasets extends Component {
     super()
     this.fileOptions = this.fileOptions.bind(this)
     this.pickFile = this.pickFile.bind(this)
-    props.actions.load_directories()
+    let id = props.match.params.id
+    if (! id && props.location.pathname.match(/\/new\//)) {
+      id = 'new'
+    }
+    props.actions.load_directories(id)
   }
   pickFile(file){
     console.log('pick', file)
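For orientation: the refactor in samplernn.actions.js replaces the old per-file lookup with a per-dataset lookup, where input files, built dataset directories, checkpoints, and generated output are all grouped under a shared name taken from the filename prefix (the part before the first '.' or '-'). The sketch below is a simplified, standalone illustration of that grouping idea only, not the module's actual code; the sample objects and the get_or_create helper are invented for the example.

// Standalone sketch of the dataset-grouping idea (illustrative only, runnable in Node).
// empty_dataset mirrors the shape introduced in the commit; the sample inputs are hypothetical.
const empty_dataset = (name) => ({
  name,
  input: null,
  checkpoints: [],
  results: [],
})

// Look a dataset up by name, creating it on first sight. The commit repeats this
// pattern inline for processed files, built dataset dirs, checkpoints, and output.
const get_or_create = (lookup, name) => {
  if (!lookup[name]) lookup[name] = empty_dataset(name)
  return lookup[name]
}

const datasetLookup = { unsorted: empty_dataset('unsorted') }

// Hypothetical inputs sharing the "drums" prefix: an uploaded source file,
// a checkpoint directory, and a generated result.
const inputFile = { name: 'drums.wav' }
const checkpointDir = { dataset: 'drums', dir: 'exp:drums-n_rnn:2' }
const resultFile = { name: 'drums-12.wav' }

// Group everything under the name before the first '.' / '-'.
get_or_create(datasetLookup, inputFile.name.split('.')[0]).input = inputFile
get_or_create(datasetLookup, checkpointDir.dataset).checkpoints.push(checkpointDir)

const pair = resultFile.name.split('.')[0].split('-')   // ['drums', '12']
const ds = get_or_create(datasetLookup, pair[0])
ds.results.push(resultFile)
resultFile.epoch = resultFile.epoch || pair[1]          // epoch recovered from the filename

console.log(datasetLookup.drums)
// -> { name: 'drums', input: {...}, checkpoints: [ {...} ], results: [ {...} ] }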
