summaryrefslogtreecommitdiff
path: root/app/client/dataset/dataset.reducer.js
diff options
context:
space:
mode:
authorJules Laplace <julescarbon@gmail.com>2018-06-05 21:03:31 +0200
committerJules Laplace <julescarbon@gmail.com>2018-06-05 21:03:31 +0200
commit746c9f9399487ece15c369c4dd9e3388d415c22c (patch)
tree42341d940f3650ea77c85daabc24a15a44769d2b /app/client/dataset/dataset.reducer.js
parent3b386480bfc8d0b2cc232fec62225d4b4c778c6b (diff)
separate dataset stuff from samplernn stuff
Diffstat (limited to 'app/client/dataset/dataset.reducer.js')
-rw-r--r--app/client/dataset/dataset.reducer.js226
1 files changed, 183 insertions, 43 deletions
diff --git a/app/client/dataset/dataset.reducer.js b/app/client/dataset/dataset.reducer.js
index 1b7f0b9..287ebb8 100644
--- a/app/client/dataset/dataset.reducer.js
+++ b/app/client/dataset/dataset.reducer.js
@@ -1,60 +1,200 @@
import types from '../types'
/*
  Note:
  This is a sub-reducer intended to process the folder-file API output from the DB.
  Please run this from inside your module reducers :)
*/

// Factory rather than a shared constant: every call returns a fresh object,
// so separate reducer instances can never alias the same initial state.
const datasetInitialState = () => {
  const initial = {
    loading: true,
    error: null,
    data: null,
  }
  return initial
}
/**
 * Reducer for the dataset slice of the samplernn module.
 *
 * State shape: { loading, error, data }, where `data` holds the
 * folder/file/dataset indexes read and written below: `folders`, `files`
 * (id arrays) and `folderLookup`, `fileLookup`, `datasetLookup` (id/name
 * keyed maps).  NOTE(review): the exact payload schema is inferred from
 * usage here — confirm against the API output.
 */
const datasetReducer = (state = datasetInitialState(), action) => {
  switch (action.type) {
    // Full dataset payload arrived — replace `data` wholesale.
    case types.dataset.load:
      return {
        ...state,
        loading: false,
        data: action.data,
      }

    // A folder was created; index it if it belongs to this module.
    case types.folder.create:
      if (action.data.module === 'samplernn') {
        return {
          ...state,
          loading: false,
          data: {
            // FIX: spread the existing data so files/fileLookup/datasetLookup
            // survive (previously they were silently dropped).
            ...state.data,
            folders: state.data.folders.concat([action.data.id]),
            folderLookup: {
              ...state.data.folderLookup,
              [action.data.id]: action.data,
            },
          },
        }
      }
      return state

    // A file was created: derive its dataset name, add the file to (or
    // create) that dataset, and index the file on its folder.
    case types.file.create:
      if (action.data.module === 'samplernn') {
        // Dataset name precedence: explicit field, else file basename,
        // else the URL, else null.
        let dataset_name = null
        if (action.data.dataset) {
          dataset_name = action.data.dataset
        } else if (action.data.name) {
          dataset_name = action.data.name.split('.')[0]
        } else if (action.data.url) {
          dataset_name = action.data.url
        }

        let dataset
        if (dataset_name in state.data.datasetLookup) {
          // Existing dataset: prepend the file id to input or output,
          // depending on whether the file was generated.
          const old_dataset = state.data.datasetLookup[dataset_name]
          dataset = {
            ...old_dataset,
            input: action.data.generated ? old_dataset.input : [action.data.id].concat(old_dataset.input),
            output: !action.data.generated ? old_dataset.output : [action.data.id].concat(old_dataset.output),
          }
        } else {
          // Brand-new dataset seeded with this single file.
          dataset = {
            name: dataset_name,
            date: action.data.created_at || action.data.date,
            input: action.data.generated ? [] : [action.data.id],
            checkpoints: [],
            output: !action.data.generated ? [] : [action.data.id],
          }
        }

        const old_folder = state.data.folderLookup[action.data.folder_id]
        const folder = {
          ...old_folder,
          // FIX: only append the dataset name when the folder does not
          // already list it (previously every file added to an existing
          // dataset duplicated the entry).
          datasets: old_folder.datasets.includes(dataset_name)
            ? old_folder.datasets
            : old_folder.datasets.concat([dataset_name]),
          files: old_folder.files.concat([action.data.id]),
        }

        return {
          ...state,
          loading: false,
          data: {
            ...state.data,
            files: state.data.files.concat([action.data.id]),
            folderLookup: {
              ...state.data.folderLookup,
              [action.data.folder_id]: folder,
            },
            fileLookup: {
              ...state.data.fileLookup,
              [action.data.id]: action.data,
            },
            datasetLookup: {
              ...state.data.datasetLookup,
              [dataset_name]: dataset,
            },
          },
        }
      }
      return state

    // A file was updated: if it moved between datasets, strip it from the
    // old dataset and re-seed it into the new one.
    case types.file.update:
      if (action.data.module === 'samplernn') {
        const old_file = state.data.fileLookup[action.data.id]
        const new_dataset = state.data.datasetLookup[action.data.dataset]
        let old_dataset
        // FIX: `old_dataset_update` was assigned without ever being
        // declared — a ReferenceError under ES-module strict mode.
        let old_dataset_update
        let new_dataset_update
        if (old_file && action.data.dataset !== old_file.dataset) {
          if (state.data.datasetLookup[old_file.dataset]) {
            old_dataset = state.data.datasetLookup[old_file.dataset]
            old_dataset_update = {
              ...old_dataset,
              input: old_dataset.input.filter(id => id !== action.data.id),
              output: old_dataset.output.filter(id => id !== action.data.id),
            }
          }
          // NOTE(review): if the target dataset is not in datasetLookup yet,
          // `new_dataset` is undefined and the lookup update below would
          // throw on `new_dataset.name` — confirm callers always move files
          // into an existing dataset.
          new_dataset_update = {
            ...new_dataset,
            input: action.data.generated ? [] : [action.data.id],
            output: !action.data.generated ? [] : [action.data.id],
          }
        }
        // FIX: removed debug console.logs that dereferenced
        // `old_dataset_update.name` / `new_dataset_update.name` and crashed
        // whenever the file had not actually changed dataset.
        return {
          ...state,
          loading: false,
          data: {
            ...state.data,
            datasetLookup: old_dataset ? {
              ...state.data.datasetLookup,
              [old_dataset.name]: old_dataset_update,
              [new_dataset.name]: new_dataset_update,
            } : new_dataset_update ? {
              ...state.data.datasetLookup,
              [new_dataset.name]: new_dataset_update,
            } : state.data.datasetLookup,
            fileLookup: {
              ...state.data.fileLookup,
              [action.data.id]: action.data,
            },
          },
        }
      }
      return state

    // A file was destroyed: drop it from every index, and garbage-collect
    // its dataset when the dataset becomes completely empty.
    case types.file.destroy:
      if (action.data.module === 'samplernn') {
        const fileLookup = { ...state.data.fileLookup }
        const datasetLookup = { ...state.data.datasetLookup }
        const folderLookup = { ...state.data.folderLookup }

        const old_file = fileLookup[action.data.id]
        if (!old_file) return state

        const old_dataset = state.data.datasetLookup[old_file.dataset]
        if (old_dataset) {
          const dataset = {
            ...old_dataset,
            input: old_dataset.input.filter(id => id !== old_file.id),
            output: old_dataset.output.filter(id => id !== old_file.id),
          }
          if (!dataset.input.length && !dataset.checkpoints.length && !dataset.output.length) {
            // Dataset is now empty — remove it and unlink it from its folder.
            delete datasetLookup[old_file.dataset]
            const old_folder = folderLookup[old_file.folder_id]
            folderLookup[old_file.folder_id] = {
              ...old_folder,
              datasets: old_folder.datasets.filter(name => name !== old_file.dataset),
            }
          } else {
            // Dataset still has members — just store the swept copy.
            datasetLookup[old_file.dataset] = dataset
          }
        }
        delete fileLookup[old_file.id]

        return {
          ...state,
          loading: false,
          data: {
            ...state.data,
            // FIX: also remove the id from the flat files array (it was
            // previously left stale after deletion).
            files: state.data.files.filter(id => id !== old_file.id),
            folderLookup,
            fileLookup,
            datasetLookup,
          },
        }
      }
      return state

    default:
      return state
  }
}