author    Jules Laplace <julescarbon@gmail.com>  2018-06-04 19:33:21 +0200
committer Jules Laplace <julescarbon@gmail.com>  2018-06-04 19:33:21 +0200
commit    521b024439b202be03447188925869100904b807 (patch)
tree      f64a5d37fc520c8b2ec563703877288142032916 /app/client/modules/samplernn/samplernn.actions.js
parent    7bb603e202710afe9f7cc4bea16a72014bc7404c (diff)
refactor state
Diffstat (limited to 'app/client/modules/samplernn/samplernn.actions.js')
-rw-r--r--  app/client/modules/samplernn/samplernn.actions.js  33
1 file changed, 23 insertions, 10 deletions
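The change normalizes the state that load_directories builds before dispatching it: instead of pushing whole file objects into both folder.files and dataset.input/output, each entity is stored once in a lookup table (folderLookup, fileLookup, datasetLookup, taskLookup) and the collections keep only keys, with uuidv1() supplying a key for files that arrive without an id. A minimal sketch of that pattern, assuming nothing beyond what the diff below shows (addFile and the sample folder are illustrative, not part of the actual code):

  import uuidv1 from 'uuid/v1'

  // Entities live once in a lookup table, keyed by id (or a generated uuid);
  // folders and datasets keep only those keys.
  const fileLookup = {}
  const folder = { id: 'f1', name: 'unsorted', files: [] }  // illustrative folder

  const addFile = (file) => {
    const key = file.id || (file.uuid = uuidv1())  // fall back to a generated uuid
    fileLookup[key] = file
    folder.files.push(key)                         // push the key, not the object
    return key
  }

  // Reading back resolves keys through the lookup:
  const filesInFolder = folder.files.map(key => fileLookup[key])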
diff --git a/app/client/modules/samplernn/samplernn.actions.js b/app/client/modules/samplernn/samplernn.actions.js
index fe73bf2..828850e 100644
--- a/app/client/modules/samplernn/samplernn.actions.js
+++ b/app/client/modules/samplernn/samplernn.actions.js
@@ -1,3 +1,5 @@
+import uuidv1 from 'uuid/v1'
+
import socket from '../../socket'
import types from '../../types'
@@ -28,7 +30,10 @@ export const load_directories = (id) => (dispatch) => {
datasets: [],
}
- const datasetLookup = {}
+ let datasetLookup = {}
+ let folderLookup = {}
+ let fileLookup = {}
+ let taskLookup = {}
const get_dataset = (name, folder=unsortedFolder, date) => {
const dataset = datasetLookup[name] || empty_dataset(name, folder)
@@ -45,13 +50,13 @@ export const load_directories = (id) => (dispatch) => {
checkpoints: [],
output: [],
}
- datasetLookup[dataset.name] = dataset
- folder.datasets.push(dataset)
+ datasetLookup[name] = dataset
+ folder.datasets.push(name)
return dataset
}
// take all of the folders and put them in a lookup
- const folderLookup = folders.reduce((folderLookup, folder) => {
+ folderLookup = folders.reduce((folderLookup, folder) => {
folderLookup[folder.id] = {
id: folder.id,
name: folder.name,
@@ -70,6 +75,7 @@ export const load_directories = (id) => (dispatch) => {
// build the initial dataset lookup table using the ungenerated files
ungeneratedFiles.reduce((datasetLookup, file) => {
+ fileLookup[file.id] = file
if (! file.name) {
file.name = (file.opt || {}).token || file.url
}
@@ -77,18 +83,19 @@ export const load_directories = (id) => (dispatch) => {
const folder = folderLookup[file.folder_id] || unsortedFolder
const dataset = get_dataset(name, folder, file.date || file.created_at)
if (file.url.match(file.name)) file.persisted = true
- dataset.input.push(file)
- folder.files.push(file)
+ dataset.input.push(file.id)
+ folder.files.push(file.id)
return datasetLookup
}, datasetLookup)
// go over the generated files and add addl datasets (if the files were deleted)
generatedFiles.map(file => {
+ fileLookup[file.id] = file
const pair = file.name.split('.')[0].split('-')
const folder = folderLookup[file.folder_id] || unsortedFolder
const dataset = get_dataset(pair[0], folder, file.date || file.created_at)
- dataset.output.push(file)
- folder.files.push(file)
+ dataset.output.push(file.id)
+ folder.files.push(file.id)
file.epoch = file.epoch || pair[1]
})
@@ -101,10 +108,12 @@ export const load_directories = (id) => (dispatch) => {
})
flatDatasets.forEach(file => {
+ file.uuid = uuidv1()
+ fileLookup[file.uuid] = file
const name = file.name.split('.')[0]
const dataset = get_dataset(name, unsortedFolder, file.date)
file.persisted = false
- dataset.input.push(file)
+ dataset.input.push(file.uuid)
})
// exp:coccokit_3-frame_sizes:8,2-n_rnn:2-dataset:coccokit_3
@@ -129,6 +138,8 @@ export const load_directories = (id) => (dispatch) => {
})
output.map(file => {
+ file.uuid = uuidv1()
+ fileLookup[file.uuid] = file
const pair = file.name.split('.')[0].split('-')
const dataset = get_dataset(pair[0], unsortedFolder, file.date)
file.persisted = false
@@ -144,7 +155,7 @@ export const load_directories = (id) => (dispatch) => {
return false
})
if (! found) {
- dataset.output.push(file)
+ dataset.output.push(file.uuid)
}
})
@@ -152,7 +163,9 @@ export const load_directories = (id) => (dispatch) => {
type: types.samplernn.init,
data: {
folderLookup,
+ fileLookup,
datasetLookup,
+ taskLookup,
folders, files,
checkpoints,
output,
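The init action now carries the four lookup tables alongside the raw folders, files, checkpoints and output arrays. The reducer and any selectors are outside this diff; one plausible way a consumer could resolve a dataset back into full file objects, assuming the dispatched payload lands under state.samplernn (the selector name and state shape are assumptions, not part of this commit):

  // Hypothetical selector; the state shape is assumed from the dispatched payload.
  const selectDatasetFiles = (state, datasetName) => {
    const { datasetLookup, fileLookup } = state.samplernn
    const dataset = datasetLookup[datasetName]
    if (!dataset) return { input: [], output: [] }
    return {
      input: dataset.input.map(key => fileLookup[key]),
      output: dataset.output.map(key => fileLookup[key]),
    }
  }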