| field | value | date |
|---|---|---|
| author | Jules Laplace <julescarbon@gmail.com> | 2018-06-05 21:03:31 +0200 |
| committer | Jules Laplace <julescarbon@gmail.com> | 2018-06-05 21:03:31 +0200 |
| commit | 746c9f9399487ece15c369c4dd9e3388d415c22c (patch) | |
| tree | 42341d940f3650ea77c85daabc24a15a44769d2b /app/client | |
| parent | 3b386480bfc8d0b2cc232fec62225d4b4c778c6b (diff) | |
separate dataset stuff from samplernn stuff
Diffstat (limited to 'app/client')
| mode | file | lines changed |
|---|---|---|
| -rw-r--r-- | app/client/dataset/dataset.component.js (renamed from app/client/modules/samplernn/samplernn.datasets.js) | 38 |
| -rw-r--r-- | app/client/dataset/dataset.form.js | 2 |
| -rw-r--r-- | app/client/dataset/dataset.reducer.js | 226 |
| -rw-r--r-- | app/client/dataset/upload.reducer.js | 52 |
| -rw-r--r-- | app/client/dataset/upload.status.js (renamed from app/client/dataset/dataset.status.js) | 6 |
| -rw-r--r-- | app/client/modules/samplernn/samplernn.actions.js | 3 |
| -rw-r--r-- | app/client/modules/samplernn/samplernn.reducer.js | 191 |
| -rw-r--r-- | app/client/modules/samplernn/views/samplernn.import.js | 8 |
| -rw-r--r-- | app/client/modules/samplernn/views/samplernn.show.js | 12 |
| -rw-r--r-- | app/client/store.js | 4 |
| -rw-r--r-- | app/client/types.js | 13 |
11 files changed, 286 insertions(+), 269 deletions(-)
diff --git a/app/client/modules/samplernn/samplernn.datasets.js b/app/client/dataset/dataset.component.js
index 921d1a2..14ad852 100644
--- a/app/client/modules/samplernn/samplernn.datasets.js
+++ b/app/client/dataset/dataset.component.js
@@ -1,27 +1,24 @@
 import { h, Component } from 'preact'
 import { bindActionCreators } from 'redux'
 import { connect } from 'react-redux'
-import * as util from '../../util'
+import * as util from '../util'
 
-import * as samplernnActions from './samplernn.actions'
-import actions from '../../actions'
+import actions from '../actions'
 
-import DatasetForm from '../../dataset/dataset.form'
-import NewDatasetForm from '../../dataset/dataset.new'
-import { FileList, FileRow } from '../../common/fileList.component'
-import Loading from '../../common/loading.component'
+import DatasetForm from './dataset.form'
+import NewDatasetForm from './dataset.new'
+import { FileList, FileRow } from '../common/fileList.component'
+import Loading from '../common/loading.component'
 
-import samplernnModule from './samplernn.module'
-
-class SampleRNNDatasets extends Component {
+class DatasetComponent extends Component {
   render(){
-    const { samplernn, folder, match, history } = this.props
-    if (samplernn.loading) {
-      return <Loading progress={samplernn.progress} />
+    const { loading, progress, module, data, folder, match, history } = this.props
+    if (loading) {
+      return <Loading progress={progress} />
     }
-    if (!samplernn.data.folders.length) {
+    if (!data.folders.length) {
       console.log('no folders, redirect to /new')
-      return history.push('/samplernn/new/')
+      return history.push('/' + module.name + '/new/')
     }
     if (!folder || !folder.name) return
     return (
@@ -37,11 +34,11 @@ class SampleRNNDatasets extends Component {
     )
   }
   renderGroups(){
-    const { samplernn, folder, runner, onPickDataset, onPickFile, datasetActions } = this.props
-    const { datasetLookup, fileLookup } = samplernn.data
+    const { module, data, folder, runner, onPickDataset, onPickFile, datasetActions } = this.props
+    const { datasetLookup, fileLookup } = data
     const { mapFn, sortFn } = util.sort.orderByFn('date desc')
-    const moduleOnCPU = runner && runner.cpu.task && runner.cpu.task.module === samplernnModule.name
-    const moduleOnGPU = runner && runner.gpu.task && runner.gpu.task.module === samplernnModule.name
+    const moduleOnCPU = runner && runner.cpu.task && runner.cpu.task.module === module.name
+    const moduleOnGPU = runner && runner.gpu.task && runner.gpu.task.module === module.name
     const datasets = folder.datasets.map(name => datasetLookup[name]).map(mapFn).sort(sortFn).map(pair => {
       const dataset = pair[1]
       let status = ''
@@ -118,11 +115,10 @@ class SampleRNNDatasets extends Component {
 }
 
 const mapStateToProps = state => ({
-  samplernn: state.module.samplernn,
   runner: state.system.runner,
 })
 
 const mapDispatchToProps = (dispatch, ownProps) => ({
 })
 
-export default connect(mapStateToProps, mapDispatchToProps)(SampleRNNDatasets)
+export default connect(mapStateToProps, mapDispatchToProps)(DatasetComponent)
diff --git a/app/client/dataset/dataset.form.js b/app/client/dataset/dataset.form.js
index d462d92..255afa0 100644
--- a/app/client/dataset/dataset.form.js
+++ b/app/client/dataset/dataset.form.js
@@ -67,7 +67,7 @@ class DatasetForm extends Component {
   }
 }
 
-const mapStateToProps = state => state.dataset
+const mapStateToProps = state => state.upload
 
 const mapDispatchToProps = (dispatch, ownProps) => ({
   actions: {
diff --git a/app/client/dataset/dataset.reducer.js b/app/client/dataset/dataset.reducer.js
index 1b7f0b9..287ebb8 100644
--- a/app/client/dataset/dataset.reducer.js
+++ b/app/client/dataset/dataset.reducer.js
@@ -1,60 +1,200 @@
 import types from '../types'
 
-const datasetInitialState = {
-  loading: false,
+/*
+  Note:
+  This is a sub-reducer intended to process the folder-file API output from the DB.
+  Please run this from inside your module reducers :)
+*/
+
+const datasetInitialState = () => ({
+  loading: true,
   error: null,
-  status: '',
-}
+  data: null,
+})
 
-const datasetReducer = (state = datasetInitialState, action) => {
+const datasetReducer = (state = datasetInitialState(), action) => {
   switch(action.type) {
-    case types.folder.upload_loading:
+    case types.dataset.load:
       return {
-        error: null,
-        loading: true,
-        status: 'Loading...',
-      }
-    case types.folder.upload_error:
-      return {
-        error: null,
+        ...state,
         loading: false,
-        status: 'Error uploading :(',
+        data: action.data,
       }
-    case types.folder.upload_progress:
-      return {
-        error: null,
-        loading: true,
-        status: 'Upload progress ' + action.percent + '%',
+
+    case types.folder.create:
+      if (action.data.module === 'samplernn') {
+        return {
+          ...state,
+          loading: false,
+          data: {
+            folders: state.data.folders.concat([action.data.id]),
+            folderLookup: {
+              ...state.data.folderLookup,
+              [action.data.id]: action.data,
+            }
+          },
+        }
       }
-    case types.folder.upload_waiting:
-      return {
-        error: null,
-        loading: true,
-        status: 'Waiting for server to finish processing...',
+      return state
+
+    case types.file.create:
+      if (action.data.module === 'samplernn') {
+        console.log(action.data)
+        let dataset, old_dataset, folder, old_folder
+        let dataset_name
+        if (action.data.dataset) {
+          dataset_name = action.data.dataset
+        }
+        else if (action.data.name) {
+          dataset_name = action.data.name.split('.')[0]
+        }
+        else if (action.data.url) {
+          dataset_name = action.data.url
+        }
+        else {
+          dataset_name = null
+        }
+        if (dataset_name in state.data.datasetLookup) {
+          old_dataset = state.data.datasetLookup[dataset_name]
+          dataset = {
+            ...old_dataset,
+            input: action.data.generated ? old_dataset.input : [action.data.id].concat(old_dataset.input),
+            output: !action.data.generated ? old_dataset.output : [action.data.id].concat(old_dataset.output),
+          }
+        } else {
+          console.log(action.data.created_at, action.data.date)
+          dataset = {
+            name: dataset_name,
+            date: action.data.created_at || action.data.date,
+            input: action.data.generated ? [] : [action.data.id],
+            checkpoints: [],
+            output: !action.data.generated ? [] : [action.data.id],
+          }
+        }
+        old_folder = state.data.folderLookup[action.data.folder_id]
+        folder = {
+          ...old_folder,
+          datasets: old_folder.datasets.concat([dataset_name]),
+          files: old_folder.files.concat([action.data.id]),
+        }
+        return {
+          ...state,
+          loading: false,
+          data: {
+            ...state.data,
+            files: state.data.files.concat([action.data.id]),
+            folderLookup: {
+              ...state.data.folderLookup,
+              [action.data.folder_id]: folder,
+            },
+            fileLookup: {
+              ...state.data.fileLookup,
+              [action.data.id]: action.data,
+            },
+            datasetLookup: {
+              ...state.data.datasetLookup,
+              [dataset_name]: dataset,
+            },
+          }
+        }
       }
-    case types.folder.upload_complete:
-      return {
-        error: null,
-        loading: false,
-        status: 'Upload complete',
+      return state
+
+    case types.file.update:
+      if (action.data.module === 'samplernn') {
+        let old_dataset;
+        let new_dataset = state.data.datasetLookup[action.data.dataset]
+        let old_file = state.data.fileLookup[action.data.id]
+        let new_dataset_update;
+        if (old_file && action.data.dataset !== old_file.dataset) {
+          if (state.data.datasetLookup[old_file.dataset]) {
+            old_dataset = state.data.datasetLookup[old_file.dataset]
+            old_dataset_update = {
+              ...old_dataset,
+              input: old_dataset.input.filter(id => id !== action.data.id),
+              output: old_dataset.output.filter(id => id !== action.data.id),
+            }
+          }
+          new_dataset_update = {
+            ...new_dataset,
+            input: action.data.generated ? [] : [action.data.id],
+            output: !action.data.generated ? [] : [action.data.id],
+          }
+        }
+        console.log('old_dataset', old_dataset_update.name)
+        console.log('new_dataset', new_dataset_update.name)
+        return {
+          ...state,
+          loading: false,
+          data: {
+            ...state.data,
+            datasetLookup: old_dataset ? {
+              ...state.data.datasetLookup,
+              [old_dataset.name]: old_dataset_update,
+              [new_dataset.name]: new_dataset_update,
+            } : new_dataset_update ? {
+              ...state.data.datasetLookup,
+              [new_dataset.name]: new_dataset_update,
+            } : state.data.datasetLookup,
+            fileLookup: {
+              ...state.data.fileLookup,
+              [action.data.id]: action.data,
+            },
+          }
+        }
       }
-    case types.file.create_loading:
-      return {
-        error: null,
-        loading: true,
-        status: 'Creating file...'
+      return state
+
+    case types.file.destroy:
+      if (action.data.module === 'samplernn') {
+        // delete the key from fileLookup
+        let fileLookup = {
+          ...state.data.fileLookup
+        }
+        let datasetLookup = {
+          ...state.data.datasetLookup
+        }
+        let folderLookup = {
+          ...state.data.folderLookup
+        }
+        let old_file = fileLookup[action.data.id]
+        if (!old_file) return state
+        let old_dataset = state.data.datasetLookup[old_file.dataset]
+        let dataset
+        if (old_dataset) {
+          dataset = {
+            ...old_dataset,
+            input: old_dataset.input.filter(id => id !== old_file.id),
+            output: old_dataset.output.filter(id => id !== old_file.id),
+          }
+          if (! dataset.input.length && ! dataset.checkpoints.length && ! dataset.output.length) {
+            delete datasetLookup[old_file.dataset]
+            let old_folder = folderLookup[old_file.folder_id]
+            folderLookup[old_file.folder_id] = {
+              ...old_folder,
+              datasets: old_folder.datasets.filter(name => name !== old_file.dataset)
+            }
+          } else {
+            console.log('dataset just needs sweeping')
+            datasetLookup[old_file.dataset] = dataset
+          }
+        }
+        delete fileLookup[old_file.id]
+
+        return {
+          ...state,
+          loading: false,
+          data: {
+            ...state.data,
+            folderLookup,
+            fileLookup,
+            datasetLookup,
+          }
+        }
       }
-    case types.socket.status:
-      return datasetSocket(state, action.data)
-    default: return state
-  }
-}
-const datasetSocket = (state, action) => {
-  console.log(action)
-  switch (action.key) {
-    default:
+    default:
       return state
   }
 }
diff --git a/app/client/dataset/upload.reducer.js b/app/client/dataset/upload.reducer.js
new file mode 100644
index 0000000..e28a18e
--- /dev/null
+++ b/app/client/dataset/upload.reducer.js
@@ -0,0 +1,52 @@
+import types from '../types'
+
+const uploadInitialState = {
+  loading: false,
+  error: null,
+  status: '',
+}
+
+const uploadReducer = (state = uploadInitialState, action) => {
+  switch(action.type) {
+    case types.folder.upload_loading:
+      return {
+        error: null,
+        loading: true,
+        status: 'Loading...',
+      }
+    case types.folder.upload_error:
+      return {
+        error: null,
+        loading: false,
+        status: 'Error uploading :(',
+      }
+    case types.folder.upload_progress:
+      return {
+        error: null,
+        loading: true,
+        status: 'Upload progress ' + action.percent + '%',
+      }
+    case types.folder.upload_waiting:
+      return {
+        error: null,
+        loading: true,
+        status: 'Waiting for server to finish processing...',
+      }
+    case types.folder.upload_complete:
+      return {
+        error: null,
+        loading: false,
+        status: 'Upload complete',
+      }
+    case types.file.create_loading:
+      return {
+        error: null,
+        loading: true,
+        status: 'Creating file...'
+      }
+    default:
+      return state
+  }
+}
+
+export default uploadReducer
diff --git a/app/client/dataset/dataset.status.js b/app/client/dataset/upload.status.js
index 9e9e56f..3fce4fa 100644
--- a/app/client/dataset/dataset.status.js
+++ b/app/client/dataset/upload.status.js
@@ -4,7 +4,7 @@ import { connect } from 'react-redux'
 
 import Loading from '../common/loading.component'
 
-function DatasetStatus (props) {
+function UploadStatus (props) {
   return (
     <div class='status'>
       {props.status}
@@ -12,8 +12,8 @@ function DatasetStatus (props) {
   )
 }
 
-const mapStateToProps = state => state.dataset
+const mapStateToProps = state => state.upload
 
 const mapDispatchToProps = (dispatch, ownProps) => ({})
 
-export default connect(mapStateToProps, mapDispatchToProps)(DatasetStatus)
+export default connect(mapStateToProps, mapDispatchToProps)(UploadStatus)
diff --git a/app/client/modules/samplernn/samplernn.actions.js b/app/client/modules/samplernn/samplernn.actions.js
index 2be59ed..642319d 100644
--- a/app/client/modules/samplernn/samplernn.actions.js
+++ b/app/client/modules/samplernn/samplernn.actions.js
@@ -96,8 +96,9 @@ export const load_directories = (id) => (dispatch) => {
     })
 
     dispatch({
-      type: types.samplernn.init,
+      type: types.dataset.load,
       data: {
+        module,
         folderLookup,
         fileLookup,
         datasetLookup,
diff --git a/app/client/modules/samplernn/samplernn.reducer.js b/app/client/modules/samplernn/samplernn.reducer.js
index c1dee82..1efbc90 100644
--- a/app/client/modules/samplernn/samplernn.reducer.js
+++ b/app/client/modules/samplernn/samplernn.reducer.js
@@ -1,5 +1,7 @@
 import types from '../../types'
 
+import datasetReducer from '../../dataset/dataset.reducer'
+
 const samplernnInitialState = {
   loading: true,
   progress: { i: 0, n: 0 },
@@ -11,192 +13,11 @@
 }
 
 const samplernnReducer = (state = samplernnInitialState, action) => {
-  // console.log(action.type)
-  switch(action.type) {
-    case types.samplernn.init:
-      return {
-        ...state,
-        loading: false,
-        data: action.data,
-      }
-
-    case types.folder.create:
-      if (action.data.module === 'samplernn') {
-        return {
-          ...state,
-          loading: false,
-          data: {
-            folders: state.data.folders.concat([action.data.id]),
-            folderLookup: {
-              ...state.data.folderLookup,
-              [action.data.id]: action.data,
-            }
-          },
-          folder: {
-            ...action.data,
-            datasets: [],
-            files: [],
-          },
-        }
-      }
-      return state
-
-    case types.file.create:
-      if (action.data.module === 'samplernn') {
-        console.log(action.data)
-        let dataset, old_dataset, folder, old_folder
-        let dataset_name
-        if (action.data.dataset) {
-          dataset_name = action.data.dataset
-        }
-        else if (action.data.name) {
-          dataset_name = action.data.name.split('.')[0]
-        }
-        else if (action.data.url) {
-          dataset_name = action.data.url
-        }
-        else {
-          dataset_name = null
-        }
-        if (dataset_name in state.data.datasetLookup) {
-          old_dataset = state.data.datasetLookup[dataset_name]
-          dataset = {
-            ...old_dataset,
-            input: action.data.generated ? old_dataset.input : [action.data.id].concat(old_dataset.input),
-            output: !action.data.generated ? old_dataset.output : [action.data.id].concat(old_dataset.output),
-          }
-        } else {
-          console.log(action.data.created_at, action.data.date)
-          dataset = {
-            name: dataset_name,
-            date: action.data.created_at || action.data.date,
-            input: action.data.generated ? [] : [action.data.id],
-            checkpoints: [],
-            output: !action.data.generated ? [] : [action.data.id],
-          }
-        }
-        old_folder = state.data.folderLookup[action.data.folder_id]
-        folder = {
-          ...old_folder,
-          datasets: old_folder.datasets.concat([dataset_name]),
-          files: old_folder.files.concat([action.data.id]),
-        }
-        return {
-          ...state,
-          loading: false,
-          data: {
-            ...state.data,
-            files: state.data.files.concat([action.data.id]),
-            folderLookup: {
-              ...state.data.folderLookup,
-              [action.data.folder_id]: folder,
-            },
-            fileLookup: {
-              ...state.data.fileLookup,
-              [action.data.id]: action.data,
-            },
-            datasetLookup: {
-              ...state.data.datasetLookup,
-              [dataset_name]: dataset,
-            },
-          }
-        }
-      }
-      return state
-
-    case types.file.update:
-      if (action.data.module === 'samplernn') {
-        let old_dataset;
-        let new_dataset = state.data.datasetLookup[action.data.dataset]
-        let old_file = state.data.fileLookup[action.data.id]
-        let new_dataset_update;
-        if (old_file && action.data.dataset !== old_file.dataset) {
-          if (state.data.datasetLookup[old_file.dataset]) {
-            old_dataset = state.data.datasetLookup[old_file.dataset]
-            old_dataset_update = {
-              ...old_dataset,
-              input: old_dataset.input.filter(id => id !== action.data.id),
-              output: old_dataset.output.filter(id => id !== action.data.id),
-            }
-          }
-          new_dataset_update = {
-            ...new_dataset,
-            input: action.data.generated ? [] : [action.data.id],
-            output: !action.data.generated ? [] : [action.data.id],
-          }
-        }
-        console.log('old_dataset', old_dataset_update.name)
-        console.log('new_dataset', new_dataset_update.name)
-        return {
-          ...state,
-          loading: false,
-          data: {
-            ...state.data,
-            datasetLookup: old_dataset ? {
-              ...state.data.datasetLookup,
-              [old_dataset.name]: old_dataset_update,
-              [new_dataset.name]: new_dataset_update,
-            } : new_dataset_update ? {
-              ...state.data.datasetLookup,
-              [new_dataset.name]: new_dataset_update,
-            } : state.data.datasetLookup,
-            fileLookup: {
-              ...state.data.fileLookup,
-              [action.data.id]: action.data,
-            },
-          }
-        }
-      }
-      return state
-
-    case types.file.destroy:
-      if (action.data.module === 'samplernn') {
-        // delete the key from fileLookup
-        let fileLookup = {
-          ...state.data.fileLookup
-        }
-        let datasetLookup = {
-          ...state.data.datasetLookup
-        }
-        let folderLookup = {
-          ...state.data.folderLookup
-        }
-        let old_file = fileLookup[action.data.id]
-        if (!old_file) return state
-        let old_dataset = state.data.datasetLookup[old_file.dataset]
-        let dataset
-        if (old_dataset) {
-          dataset = {
-            ...old_dataset,
-            input: old_dataset.input.filter(id => id !== old_file.id),
-            output: old_dataset.output.filter(id => id !== old_file.id),
-          }
-          if (! dataset.input.length && ! dataset.checkpoints.length && ! dataset.output.length) {
-            delete datasetLookup[old_file.dataset]
-            let old_folder = folderLookup[old_file.folder_id]
-            folderLookup[old_file.folder_id] = {
-              ...old_folder,
-              datasets: old_folder.datasets.filter(name => name !== old_file.dataset)
-            }
-          } else {
-            console.log('dataset just needs sweeping')
-            datasetLookup[old_file.dataset ] = dataset
-          }
-        }
-        delete fileLookup[old_file.id]
-
-        return {
-          ...state,
-          loading: false,
-          data: {
-            ...state.data,
-            folderLookup,
-            fileLookup,
-            datasetLookup,
-          }
-        }
-      }
-      return state
+  if (action.data && action.data.module === 'samplernn') {
+    return datasetReducer(state, action)
+  }
+
+  switch(action.type) {
     case types.samplernn.set_folder:
       return {
diff --git a/app/client/modules/samplernn/views/samplernn.import.js b/app/client/modules/samplernn/views/samplernn.import.js
index 6f5f78a..cce4aea 100644
--- a/app/client/modules/samplernn/views/samplernn.import.js
+++ b/app/client/modules/samplernn/views/samplernn.import.js
@@ -9,7 +9,7 @@ import Select from '../../../common/select.component'
 import TextInput from '../../../common/textInput.component'
 import Button from '../../../common/button.component'
 
-import SampleRNNDatasets from '../samplernn.datasets'
+import DatasetComponent from '../../../dataset/dataset.component'
 
 const samplernnModule = {
   name: 'samplernn',
@@ -90,7 +90,11 @@ class SampleRNNImport extends Component {
           </div>
         </div>
       </div>
-      <SampleRNNDatasets
+      <DatasetComponent
+        loading={samplernn.loading}
+        progress={samplernn.progress}
+        module={samplernnModule}
+        data={samplernn.data}
         id="unsorted"
         folder={folder}
         history={this.props.history}
diff --git a/app/client/modules/samplernn/views/samplernn.show.js b/app/client/modules/samplernn/views/samplernn.show.js
index 8b42d44..8a9d589 100644
--- a/app/client/modules/samplernn/views/samplernn.show.js
+++ b/app/client/modules/samplernn/views/samplernn.show.js
@@ -10,10 +10,10 @@ import * as audioPlayerActions from '../../../common/audioPlayer/audioPlayer.act
 import Loading from '../../../common/loading.component'
 import DatasetForm from '../../../dataset/dataset.form'
 import NewDatasetForm from '../../../dataset/dataset.new'
-import DatasetStatus from '../../../dataset/dataset.status'
+import UploadStatus from '../../../dataset/upload.status'
 import { FileList, FileRow } from '../../../common/fileList.component'
 
-import SampleRNNDatasets from '../samplernn.datasets'
+import DatasetComponent from '../../../dataset/dataset.component'
 
 import samplernnModule from '../samplernn.module'
 
@@ -43,7 +43,7 @@ class SampleRNNShow extends Component {
         <div class='heading'>
           <div class='spaced'>
             <h1>{folder ? folder.name : <Loading />}</h1>
-            <DatasetStatus />
+            <UploadStatus />
           </div>
         </div>
         {folder && folder.name && folder.name !== 'unsorted' &&
@@ -54,8 +54,12 @@ class SampleRNNShow extends Component {
             canUpload
             canAddURL
           />
         }
-        <SampleRNNDatasets
+        <DatasetComponent
+          loading={samplernn.loading}
+          progress={samplernn.progress}
           id={samplernn.folder_id}
+          module={samplernnModule}
+          data={samplernn.data}
           folder={folder}
           history={history}
           onPickFile={(file, e) => {
diff --git a/app/client/store.js b/app/client/store.js
index 99eed29..8ffab15 100644
--- a/app/client/store.js
+++ b/app/client/store.js
@@ -9,7 +9,7 @@ import { routerReducer } from 'react-router-redux'
 import systemReducer from './system/system.reducer'
 import dashboardReducer from './dashboard/dashboard.reducer'
 import liveReducer from './live/live.reducer'
-import datasetReducer from './dataset/dataset.reducer'
+import uploadReducer from './dataset/upload.reducer'
 import queueReducer from './queue/queue.reducer'
 import audioPlayerReducer from './common/audioPlayer/audioPlayer.reducer'
 import { moduleReducer } from './modules/module.reducer'
@@ -18,7 +18,7 @@ const appReducer = combineReducers({
   system: systemReducer,
   dashboard: dashboardReducer,
   live: liveReducer,
-  dataset: datasetReducer,
+  upload: uploadReducer,
   queue: queueReducer,
   router: routerReducer,
   module: moduleReducer,
diff --git a/app/client/types.js b/app/client/types.js
index 5b1963b..720051f 100644
--- a/app/client/types.js
+++ b/app/client/types.js
@@ -22,8 +22,6 @@ export default {
   ]),
   file: crud_type('file', [
   ]),
-  dataset: crud_type('dataset', [
-  ]),
   task: crud_type('task', [
     'starting_task',
     'stopping_task',
@@ -82,11 +80,12 @@ export default {
     enqueue: 'AUDIO_ENQUEUE',
   },
   dataset: {
-    upload_files: 'UPLOAD_FILES',
-    file_progress: 'FILE_PROGRESS',
-    file_uploaded: 'FILE_UPLOADED',
-    fetch_url: 'FETCH_URL',
-    fetch_progress: 'FETCH_PROGRESS',
+    load: 'DATASET_LOAD',
+    upload_files: 'DATASET_UPLOAD_FILES',
+    file_progress: 'DATASET_FILE_PROGRESS',
+    file_uploaded: 'DATASET_FILE_UPLOADED',
+    fetch_url: 'DATASET_FETCH_URL',
+    fetch_progress: 'DATASET_FETCH_PROGRESS',
   },
   samplernn: {
     init: 'SAMPLERNN_INIT',
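
The heart of this commit is the note added at the top of `dataset.reducer.js`: folder/file bookkeeping is now a sub-reducer that each module reducer calls for actions tagged with its own module. A minimal sketch of that delegation, condensed from the `samplernn.reducer.js` hunk above (the samplernn-specific initial-state fields and cases are trimmed here; everything shown follows the diff):

```js
// Condensed sketch of samplernn.reducer.js as changed in this commit.
import types from '../../types'
import datasetReducer from '../../dataset/dataset.reducer'

const samplernnInitialState = {
  loading: true,
  progress: { i: 0, n: 0 },
  data: null, // populated by the dataset sub-reducer (folderLookup, fileLookup, datasetLookup)
}

const samplernnReducer = (state = samplernnInitialState, action) => {
  // Folder/file/dataset actions carry the owning module in action.data.module;
  // those are handed to the shared dataset sub-reducer.
  if (action.data && action.data.module === 'samplernn') {
    return datasetReducer(state, action)
  }

  // Everything else stays module-specific (set_folder, generation state, ...).
  switch (action.type) {
    default:
      return state
  }
}

export default samplernnReducer
```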
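The upload-status state also moves: what used to live at `state.dataset` is now `state.upload`, fed by the new `upload.reducer.js`, while per-module folder/file data stays under the module reducers. A sketch of the resulting store shape, reduced to the slices this commit touches (the other reducers combined in `store.js` are unchanged and omitted here):

```js
// Store wiring after this commit, limited to the renamed slice; system,
// dashboard, live, queue, router, and audioPlayer reducers are omitted.
import { combineReducers } from 'redux'

import uploadReducer from './dataset/upload.reducer'
import { moduleReducer } from './modules/module.reducer'

const appReducer = combineReducers({
  upload: uploadReducer, // transient upload/progress status, previously keyed as `dataset`
  module: moduleReducer, // state.module.samplernn keeps the folder/file/dataset data
})

export default appReducer

// Connected components follow the rename, as in dataset.form.js and
// upload.status.js: `const mapStateToProps = state => state.upload`
```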
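Finally, because `DatasetComponent` no longer connects to `state.module.samplernn` itself, the module views hand it everything it needs as props. A usage sketch shaped after the `samplernn.show.js` hunk above; the click handlers and surrounding markup are omitted, and the wrapper component name here is made up for illustration:

```js
// Hypothetical wrapper showing how a module view feeds DatasetComponent.
import { h } from 'preact'

import DatasetComponent from '../../../dataset/dataset.component'
import samplernnModule from '../samplernn.module'

// `samplernn` is state.module.samplernn; `data` carries the folderLookup,
// fileLookup, and datasetLookup built by the dataset sub-reducer.
const SampleRNNDatasetsPanel = ({ samplernn, folder, history }) => (
  <DatasetComponent
    loading={samplernn.loading}
    progress={samplernn.progress}
    module={samplernnModule}
    data={samplernn.data}
    id={samplernn.folder_id}
    folder={folder}
    history={history}
  />
)

export default SampleRNNDatasetsPanel
```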
