import socket from '../../socket'
import types from '../../types'
import actions from '../../actions'

// Skeleton for one dataset entry: raw audio inputs, trained checkpoints,
// and generated output files are accumulated onto it as the listings are scanned.
const empty_dataset = (name) => ({
  name,
  input: [],
  checkpoints: [],
  output: [],
})

// Look up a dataset by name, creating it (and registering it on the given
// folder's dataset list) when it does not exist yet. Returns the dataset.
const ensure_dataset = (datasetLookup, name, folderDatasets) => {
  let dataset = datasetLookup[name]
  if (!dataset) {
    dataset = empty_dataset(name)
    datasetLookup[name] = dataset
    folderDatasets.push(dataset)
  }
  return dataset
}

/**
 * Load the full samplernn workspace: DB records (folders, files, tasks) plus
 * on-disk directory listings (datasets, results, output), fold them into
 * per-dataset groupings, and dispatch a single init action. Optionally
 * selects a folder afterwards.
 *
 * @param {string|number} [id] - folder id (or the literal 'unsorted') to
 *   select as the current folder after loading.
 */
export const load_directories = (id) => (dispatch) => {
  Promise.all([
    actions.folder.index({ module: 'samplernn' }),
    actions.file.index({ module: 'samplernn' }),
    actions.task.index({ module: 'samplernn' }),
    actions.socket.list_directory({ module: 'samplernn', dir: 'datasets' }),
    actions.socket.list_directory({ module: 'samplernn', dir: 'results' }),
    actions.socket.list_directory({ module: 'samplernn', dir: 'output' }),
  ]).then(res => {
    // `tasks` is fetched for its store side effects; it is not used below.
    const [folders, files, tasks, datasets, results, output] = res

    // Index folders by id; a synthetic 'unsorted' folder (id 0) collects
    // datasets that cannot be attributed to a persisted folder.
    const folderLookup = folders.reduce((lookup, folder) => {
      lookup[folder.id] = { id: folder.id, name: folder.name, folder, datasets: [] }
      folder.datasets = []
      return lookup
    }, {
      unsorted: { id: 0, name: 'unsorted', datasets: [] },
    })

    // Split DB files into trainer-produced vs. user-uploaded.
    const generatedFiles = files.filter(file => file.generated)
    const ungeneratedFiles = files.filter(file => !file.generated)

    // Robustness: fall back to the synthetic folder when a file references a
    // folder id that was not returned (the original crashed on that case).
    const folder_datasets = (file) =>
      (folderLookup[file.folder_id] || folderLookup.unsorted).datasets

    // Seed the dataset lookup from the user-uploaded files; the dataset name
    // is the file's basename (text before the first dot).
    const datasetLookup = ungeneratedFiles.reduce((lookup, file) => {
      file.checkpoints = []
      if (!file.name) {
        file.name = (file.opt || {}).token || file.url
      }
      const name = (file.name || 'unsorted').split('.')[0]
      ensure_dataset(lookup, name, folder_datasets(file)).input.push(file)
      return lookup
    }, { unsorted: empty_dataset('unsorted') })

    // Generated files are named '<dataset>-<epoch>…'; they may belong to
    // datasets whose source files were deleted, so create those on demand.
    generatedFiles.forEach(file => {
      const [datasetName, epoch] = file.name.split('.')[0].split('-')
      const dataset = ensure_dataset(datasetLookup, datasetName, folder_datasets(file))
      dataset.output.push(file)
      file.epoch = file.epoch || epoch
    })

    // On-disk 'datasets' dir: subdirectories are already-built datasets,
    // loose audio files are not-yet-persisted inputs.
    const flatDatasets = datasets.filter(s => s.name.match(/(wav|aiff?|flac|mp3)$/) && !s.dir)
    const builtDatasets = datasets.filter(s => s.dir)

    builtDatasets.forEach(dir => {
      ensure_dataset(datasetLookup, dir.name, folderLookup.unsorted.datasets).isBuilt = true
    })

    flatDatasets.forEach(file => {
      const name = file.name.split('.')[0]
      const dataset = ensure_dataset(datasetLookup, name, folderLookup.unsorted.datasets)
      file.persisted = false
      dataset.input.push(file)
    })

    // Result directory names encode training params, e.g.
    //   exp:coccokit_3-frame_sizes:8,2-n_rnn:2-dataset:coccokit_3
    // Parse each into a { key: value } checkpoint record.
    const checkpoints = results.filter(s => s.dir).map(s => {
      const checkpoint = s.name
        .split('-')
        .map(part => part.split(':'))
        .filter(pair => pair.length && pair[1])
        .reduce((acc, [key, value]) => {
          acc[key] = value
          return acc
        }, {})
      checkpoint.name = checkpoint.dataset || checkpoint.exp
      checkpoint.date = s.date
      checkpoint.dir = s
      const dataset = ensure_dataset(datasetLookup, checkpoint.dataset, folderLookup.unsorted.datasets)
      dataset.checkpoints.push(checkpoint)
      return checkpoint
    })

    // Rendered audio in the 'output' dir, named '<dataset>-<epoch>…'.
    output.forEach(file => {
      const [datasetName, rawEpoch] = file.name.split('.')[0].split('-')
      const dataset = ensure_dataset(datasetLookup, datasetName, folderLookup.unsorted.datasets)
      file.persisted = false
      // Strip any non-numeric prefix from the epoch token; guard against
      // names with no '-' segment (the original crashed on undefined here).
      file.epoch = file.epoch || (rawEpoch || '').replace(/^\D+/, '')
      dataset.output.push(file)
    })

    dispatch({
      type: types.samplernn.init,
      data: {
        folderLookup,
        folders,
        files,
        checkpoints,
        builtDatasets,
        output,
      },
    })

    if (id) {
      const folder = id === 'unsorted' ? folderLookup.unsorted : folderLookup[id]
      dispatch({
        type: types.samplernn.set_folder,
        folder,
      })
    }
  }).catch(e => {
    console.error(e)
  })
}

/**
 * Run the server-side 'report' script and parse its stdout into a
 * per-experiment loss table, then dispatch it to the store.
 *
 * stdout format: blank-line-separated sections, each a name line followed by
 * rows of tab-separated 'key: value' cells.
 */
export const load_loss = () => dispatch => {
  actions.socket.run_script({ module: 'samplernn', activity: 'report' })
    .then(report => {
      const lossReport = {}
      report.stdout.split('\n\n').filter(section => !!section).forEach(section => {
        const [name, ...lines] = section.split('\n')
        lossReport[name] = lines.map(line =>
          line.split('\t').reduce((row, cell) => {
            const [key, value] = cell.split(': ')
            row[key] = value
            return row
          }, {})
        )
      })
      dispatch({ type: types.samplernn.load_loss, lossReport })
    })
    // BUG FIX: the promise chain previously had no rejection handler.
    .catch(e => { console.error(e) })
}

/**
 * Action creator: select the given folder as the current one.
 *
 * BUG FIX: the original body was `{ types.samplernn.set_folder, folder }` — a
 * block whose comma expression evaluated and discarded both operands, so the
 * creator returned undefined. It now returns the intended action object.
 *
 * @param {object} folder - folder record from folderLookup.
 * @returns {{type: *, folder: object}} the set_folder action.
 */
export const set_folder = (folder) => ({
  type: types.samplernn.set_folder,
  folder,
})

/**
 * Start a server-side task that fetches a remote audio URL into the 'test'
 * dataset (preempting any running task and watching for updates).
 *
 * @param {string} url - remote audio URL to fetch.
 */
export const fetch_url = (url) => (dispatch) => {
  actions.task.start_task(
    { activity: 'fetch', module: 'samplernn', dataset: 'test', epochs: 1, opt: { url } },
    { preempt: true, watch: true },
  )
    // BUG FIX: the returned promise was previously left floating.
    .catch(e => { console.error(e) })
}