diff options
| author | Jules Laplace <julescarbon@gmail.com> | 2018-06-04 02:54:44 +0200 |
|---|---|---|
| committer | Jules Laplace <julescarbon@gmail.com> | 2018-06-04 02:54:44 +0200 |
| commit | 2f22fd5e4a558ed9b2379565be88b9d1e1b9b7c5 (patch) | |
| tree | 4a66cae0cc816bf01949b3076fbb082a460d47cb /app/client/modules/samplernn/samplernn.actions.js | |
| parent | 9871cd35c31e77fac9ed484f80783e3267573016 (diff) | |
instructing server to curl files into the database/s3
Diffstat (limited to 'app/client/modules/samplernn/samplernn.actions.js')
| -rw-r--r-- | app/client/modules/samplernn/samplernn.actions.js | 30 |
1 file changed, 27 insertions(+), 3 deletions(-)
diff --git a/app/client/modules/samplernn/samplernn.actions.js b/app/client/modules/samplernn/samplernn.actions.js index e6fd5b5..30620a7 100644 --- a/app/client/modules/samplernn/samplernn.actions.js +++ b/app/client/modules/samplernn/samplernn.actions.js @@ -28,7 +28,7 @@ export const load_directories = (id) => (dispatch) => { const get_dataset = (name, folder=unsortedFolder, date) => { const dataset = datasetLookup[name] || empty_dataset(name, folder) if (date) { - dataset.date = dataset.date ? Math.max(+new Date(date), dataset.date) : +new Date(date) + dataset.date = (dataset.date && ! isNaN(dataset.date)) ? Math.max(+new Date(date), dataset.date) : +new Date(date) } return dataset } @@ -44,6 +44,7 @@ export const load_directories = (id) => (dispatch) => { folder.datasets.push(dataset) return dataset } + // take all of the folders and put them in a lookup const folderLookup = folders.reduce((folderLookup, folder) => { folderLookup[folder.id] = { id: folder.id, name: folder.name, folder, datasets: [] } @@ -63,7 +64,8 @@ export const load_directories = (id) => (dispatch) => { file.name = (file.opt || {}).token || file.url } const name = (file.name || 'unsorted').split('.')[0] - const dataset = get_dataset(name, folderLookup[file.folder_id], unsortedFolder, file.date) + const dataset = get_dataset(name, folderLookup[file.folder_id] || unsortedFolder, file.date || file.created_at) + if (file.url.match(file.name)) file.persisted = true dataset.input.push(file) return datasetLookup }, datasetLookup) @@ -71,7 +73,7 @@ export const load_directories = (id) => (dispatch) => { // go over the generated files and add addl datasets (if the files were deleted) generatedFiles.map(file => { const pair = file.name.split('.')[0].split('-') - const dataset = get_dataset(pair[0], folderLookup[file.folder_id], unsortedFolder, file.date) + const dataset = get_dataset(pair[0], folderLookup[file.folder_id] || unsortedFolder, file.date || file.created_at) dataset.output.push(file) 
file.epoch = file.epoch || pair[1] }) @@ -210,10 +212,32 @@ export const import_files = (state, datasetLookup) => (dispatch) => { }, []) break case 'Upload': + promises = names.reduce((a,name) => { + console.log(datasetLookup[name]) + return datasetLookup[name].input.map(file => { + if (file.persisted) return null + const partz = file.name.split('.') + const ext = partz.pop() + if (ext === 'wav' || ext === 'flac') return + console.log(file) + return actions.socket.upload_file({ + folder_id: folder, + module: 'samplernn', + activity: 'train', + path: 'datasets', + filename: file.name, + generated: false, + processed: false, + datatype: 'audio', + ttl: 60000, + }) + }).concat(a) + }, []).filter(a => !! a) break default: break } + console.log(promises) return Promise.all(promises).then(data => { console.log(data) }).catch(e => { |
