diff options
Diffstat (limited to 'app/client')
-rw-r--r--  app/client/dataset/dataset.loader.js               | 108
-rw-r--r--  app/client/modules/samplernn/samplernn.actions.js  | 113
-rw-r--r--  app/client/modules/samplernn/samplernn.datasets.js |   1
3 files changed, 130 insertions(+), 92 deletions(-)
diff --git a/app/client/dataset/dataset.loader.js b/app/client/dataset/dataset.loader.js new file mode 100644 index 0000000..6757593 --- /dev/null +++ b/app/client/dataset/dataset.loader.js @@ -0,0 +1,108 @@ + +import actions from '../actions' + +const unsortedFolders = {} + +export const unsortedFolder = (module, instantiate=false) => { + if (!unsortedFolders[module] || instantiate) { + const folder = { + id: 0, + module, + name: 'unsorted', + files: [], + datasets: [], + } + unsortedFolders[module] = folder + } + return unsortedFolders[module] +} + +export const emptyDataset = (module, datasetLookup, name, folder) => { + const dataset = { + name, + input: [], + checkpoints: [], + output: [], + } + datasetLookup[name] = dataset + folder = folder || unsortedFolder(module) + folder.datasets.push(name) + return dataset +} + +export const getDataset = (module, datasetLookup, name, folder, date) => { + const dataset = datasetLookup[name] || emptyDataset(module, datasetLookup, name, folder) + folder = folder || unsortedFolder(module) + if (date) { + dataset.date = (dataset.date && ! isNaN(dataset.date)) ? 
Math.max(+new Date(date), dataset.date) : +new Date(date) + } + return dataset +} + +export const load = module => { + return Promise.all([ + actions.folder.index({ module }), + actions.file.index({ module }), + ]).then(res => { + const [folders, files] = res + + let datasetLookup = {} + let folderLookup = {} + let fileLookup = {} + + // take all of the folders and put them in a lookup + folderLookup = folders.reduce((folderLookup, folder) => { + folderLookup[folder.id] = { + id: folder.id, + name: folder.name, + folder, + files: [], + datasets: [], + } + return folderLookup + }, { + unsorted: unsortedFolder(module, true) + }) + + // prepare the files by splitting into two groups + const generatedFiles = files.filter(file => file.generated) + const ungeneratedFiles = files.filter(file => !file.generated) + + // build the initial dataset lookup table using the ungenerated files + ungeneratedFiles.reduce((datasetLookup, file) => { + fileLookup[file.id] = file + if (! file.name) { + file.name = (file.opt || {}).token || file.url + } + const name = (file.name || 'unsorted').split('.')[0] + const folder = folderLookup[file.folder_id] || unsortedFolder(module) + const dataset = getDataset(module, datasetLookup, name, folder, file.date || file.created_at) + if (file.url.match(file.name)) file.persisted = true + dataset.input.push(file.id) + folder.files.push(file.id) + return datasetLookup + }, datasetLookup) + + // go over the generated files and add addl datasets (if the files were deleted) + generatedFiles.map(file => { + fileLookup[file.id] = file + const pair = file.name.split('.')[0].split('-') + const folder = folderLookup[file.folder_id] || unsortedFolder(module) + const dataset = getDataset(module, datasetLookup, pair[0], folder, file.date || file.created_at) + dataset.output.push(file.id) + folder.files.push(file.id) + file.epoch = file.epoch || pair[1] + }) + + return { + folderLookup, + fileLookup, + datasetLookup, + folders, + files, + unsortedFolder: 
folderLookup.unsorted, + } + }).catch(e => { + console.error(e) + }) +} diff --git a/app/client/modules/samplernn/samplernn.actions.js b/app/client/modules/samplernn/samplernn.actions.js index 357617a..2be59ed 100644 --- a/app/client/modules/samplernn/samplernn.actions.js +++ b/app/client/modules/samplernn/samplernn.actions.js @@ -3,111 +3,43 @@ import uuidv1 from 'uuid/v1' import socket from '../../socket' import types from '../../types' +import * as datasetLoader from '../../dataset/dataset.loader' + import actions from '../../actions' import { allProgress } from '../../util' export const load_directories = (id) => (dispatch) => { - // console.log(actions) - dispatch({ type: types.app.load_progress, progress: { i: 0, n: 7 }}) + const module = 'samplernn' allProgress([ - actions.folder.index({ module: 'samplernn' }), - actions.file.index({ module: 'samplernn' }), - actions.task.index({ module: 'samplernn' }), - actions.socket.list_directory({ module: 'samplernn', dir: 'datasets' }), - actions.socket.list_directory({ module: 'samplernn', dir: 'results' }), - actions.socket.list_directory({ module: 'samplernn', dir: 'output' }), - actions.socket.disk_usage({ module: 'samplernn', dir: 'datasets' }), + datasetLoader.load(module), + actions.task.index({ module }), + actions.socket.list_directory({ module, dir: 'datasets' }), + actions.socket.list_directory({ module, dir: 'results' }), + actions.socket.list_directory({ module, dir: 'output' }), + actions.socket.disk_usage({ module, dir: 'datasets' }), load_loss()(dispatch), ], (percent, i, n) => { dispatch({ type: types.app.load_progress, progress: { i, n }}) }).then(res => { // console.log(res) - const [folders, files, tasks, datasets, results, output, datasetUsage, lossReport] = res + const [datasetApiReport, tasks, datasets, results, output, datasetUsage, lossReport] = res + const { + folderLookup, + fileLookup, + datasetLookup, + folders, + files, + unsortedFolder, + } = datasetApiReport console.log(datasetUsage) - 
const unsortedFolder = { - id: 0, - name: 'unsorted', - files: [], - datasets: [], - } - - let datasetLookup = {} - let folderLookup = {} - let fileLookup = {} - let taskLookup = {} - - const get_dataset = (name, folder=unsortedFolder, date) => { - const dataset = datasetLookup[name] || empty_dataset(name, folder) - if (date) { - dataset.date = (dataset.date && ! isNaN(dataset.date)) ? Math.max(+new Date(date), dataset.date) : +new Date(date) - } - return dataset - } - - const empty_dataset = (name, folder=unsortedFolder) => { - const dataset = { - name, - input: [], - checkpoints: [], - output: [], - } - datasetLookup[name] = dataset - folder.datasets.push(name) - return dataset - } - - // take all of the folders and put them in a lookup - folderLookup = folders.reduce((folderLookup, folder) => { - folderLookup[folder.id] = { - id: folder.id, - name: folder.name, - folder, - files: [], - datasets: [], - } - return folderLookup - }, { - unsorted: unsortedFolder - }) - - // prepare the files by splitting into two groups - const generatedFiles = files.filter(file => file.generated) - const ungeneratedFiles = files.filter(file => !file.generated) - - // build the initial dataset lookup table using the ungenerated files - ungeneratedFiles.reduce((datasetLookup, file) => { - fileLookup[file.id] = file - if (! 
file.name) { - file.name = (file.opt || {}).token || file.url - } - const name = (file.name || 'unsorted').split('.')[0] - const folder = folderLookup[file.folder_id] || unsortedFolder - const dataset = get_dataset(name, folder, file.date || file.created_at) - if (file.url.match(file.name)) file.persisted = true - dataset.input.push(file.id) - folder.files.push(file.id) - return datasetLookup - }, datasetLookup) - - // go over the generated files and add addl datasets (if the files were deleted) - generatedFiles.map(file => { - fileLookup[file.id] = file - const pair = file.name.split('.')[0].split('-') - const folder = folderLookup[file.folder_id] || unsortedFolder - const dataset = get_dataset(pair[0], folder, file.date || file.created_at) - dataset.output.push(file.id) - folder.files.push(file.id) - file.epoch = file.epoch || pair[1] - }) - // also show the various flat audio files we have, in the input area.. const flatDatasets = datasets.filter(s => s.name.match(/(wav|aiff?|flac|mp3)$/) && !s.dir) const builtDatasets = datasets.filter(s => s.dir) builtDatasets.forEach(dir => { - const dataset = get_dataset(dir.name) + const dataset = datasetLoader.getDataset(module, datasetLookup, dir.name) dataset.isBuilt = true }) @@ -115,7 +47,7 @@ export const load_directories = (id) => (dispatch) => { file.uuid = uuidv1() fileLookup[file.uuid] = file const name = file.name.split('.')[0] - const dataset = get_dataset(name, unsortedFolder, file.date) + const dataset = datasetLoader.getDataset(module, datasetLookup, name, unsortedFolder, file.date) file.persisted = false dataset.input.push(file.uuid) }) @@ -131,7 +63,7 @@ export const load_directories = (id) => (dispatch) => { checkpoint.date = s.date checkpoint.dir = s checkpoint.persisted = false - const dataset = get_dataset(checkpoint.name, unsortedFolder, checkpoint.date) + const dataset = datasetLoader.getDataset(module, datasetLookup, checkpoint.name, unsortedFolder, checkpoint.date) const loss = 
lossReport[checkpoint.name] if (loss) { dataset.epoch = checkpoint.epoch = loss.length @@ -145,7 +77,7 @@ export const load_directories = (id) => (dispatch) => { file.uuid = uuidv1() fileLookup[file.uuid] = file const pair = file.name.split('.')[0].split('-') - const dataset = get_dataset(pair[0], unsortedFolder, file.date) + const dataset = datasetLoader.getDataset(module, datasetLookup, pair[0], unsortedFolder, file.date) file.persisted = false file.epoch = parseInt(file.epoch || pair[1].replace(/^\D+/, '')) || 0 dataset.epoch = Math.max(file.epoch, dataset.epoch || 0) @@ -169,7 +101,6 @@ export const load_directories = (id) => (dispatch) => { folderLookup, fileLookup, datasetLookup, - taskLookup, folders, files, checkpoints, output, diff --git a/app/client/modules/samplernn/samplernn.datasets.js b/app/client/modules/samplernn/samplernn.datasets.js index f8cba29..921d1a2 100644 --- a/app/client/modules/samplernn/samplernn.datasets.js +++ b/app/client/modules/samplernn/samplernn.datasets.js @@ -123,7 +123,6 @@ const mapStateToProps = state => ({ }) const mapDispatchToProps = (dispatch, ownProps) => ({ - samplernnActions: bindActionCreators(samplernnActions, dispatch), }) export default connect(mapStateToProps, mapDispatchToProps)(SampleRNNDatasets) |
