import socket from '../../socket';
import types from '../../types';
import actions from '../../actions';

/**
 * Thunk: load every samplernn resource in parallel (persisted folders, files
 * and tasks, the `datasets`/`results`/`output` directory listings, and the
 * training-loss report), assemble them into per-dataset records grouped by
 * folder, and dispatch a single `samplernn.init` action. When `id` is given,
 * additionally dispatches `samplernn.set_folder` to select that folder
 * ('unsorted' selects the catch-all folder).
 *
 * @param {string|number} [id] - optional folder id (or 'unsorted') to select
 * @returns {Function} redux-thunk `(dispatch) => Promise`
 */
export const load_directories = (id) => (dispatch) => {
  Promise.all([
    actions.folder.index({ module: 'samplernn' }),
    actions.file.index({ module: 'samplernn' }),
    actions.task.index({ module: 'samplernn' }),
    actions.socket.list_directory({ module: 'samplernn', dir: 'datasets' }),
    actions.socket.list_directory({ module: 'samplernn', dir: 'results' }),
    actions.socket.list_directory({ module: 'samplernn', dir: 'output' }),
    load_loss()(dispatch),
  ]).then((res) => {
    const [folders, files, tasks, datasets, results, output, lossReport] = res;

    // Catch-all folder for files/datasets with no persisted folder row.
    const unsortedFolder = {
      id: 0,
      name: 'unsorted',
      datasets: [],
    };

    // name -> dataset record; populated lazily via get_dataset.
    const datasetLookup = {};

    // Fetch (or lazily create) the dataset record for `name`, tracking the
    // newest `date` seen for it. `folder` falls back to the unsorted folder
    // when the caller passes undefined (e.g. an unknown folder_id).
    const get_dataset = (name, folder = unsortedFolder, date) => {
      const dataset = datasetLookup[name] || empty_dataset(name, folder);
      if (date) {
        dataset.date = dataset.date
          ? Math.max(+new Date(date), dataset.date)
          : +new Date(date);
      }
      return dataset;
    };

    // Create a fresh dataset record, register it in the lookup, and attach
    // it to its folder.
    const empty_dataset = (name, folder = unsortedFolder) => {
      const dataset = {
        name,
        input: [],
        checkpoints: [],
        output: [],
      };
      datasetLookup[dataset.name] = dataset;
      folder.datasets.push(dataset);
      return dataset;
    };

    // Index persisted folders by id; seed with the unsorted catch-all.
    const folderLookup = folders.reduce((lookup, folder) => {
      lookup[folder.id] = { id: folder.id, name: folder.name, folder, datasets: [] };
      folder.datasets = [];
      return lookup;
    }, { unsorted: unsortedFolder });

    // Split persisted files: generated output vs. uploaded/source input.
    const generatedFiles = files.filter((file) => file.generated);
    const ungeneratedFiles = files.filter((file) => !file.generated);

    // Build the initial dataset table from the source (input) files.
    ungeneratedFiles.forEach((file) => {
      if (!file.name) {
        file.name = (file.opt || {}).token || file.url;
      }
      const name = (file.name || 'unsorted').split('.')[0];
      // BUGFIX: the original passed `unsortedFolder` as a stray 3rd argument,
      // which landed in the `date` parameter — `file.date` was silently
      // ignored and `dataset.date` became NaN. The default parameter already
      // covers an unknown folder_id.
      const dataset = get_dataset(name, folderLookup[file.folder_id], file.date);
      dataset.input.push(file);
    });

    // Generated files may belong to datasets whose inputs were deleted, so
    // create those datasets on demand. Names look like `<dataset>-<epoch>.*`.
    generatedFiles.forEach((file) => {
      const pair = file.name.split('.')[0].split('-');
      // BUGFIX: same stray `unsortedFolder` argument removed (see above).
      const dataset = get_dataset(pair[0], folderLookup[file.folder_id], file.date);
      dataset.output.push(file);
      file.epoch = file.epoch || pair[1];
    });

    // `datasets/` listing: loose audio files vs. built dataset directories.
    const flatDatasets = datasets.filter((s) => s.name.match(/(wav|aiff?|flac|mp3)$/) && !s.dir);
    const builtDatasets = datasets.filter((s) => s.dir);

    builtDatasets.forEach((dir) => {
      const dataset = get_dataset(dir.name);
      dataset.isBuilt = true;
    });

    flatDatasets.forEach((file) => {
      const name = file.name.split('.')[0];
      const dataset = get_dataset(name, unsortedFolder, file.date);
      file.persisted = false;
      dataset.input.push(file);
    });

    // Result directories are named as `-`-separated `key:value` pairs, e.g.
    //   exp:coccokit_3-frame_sizes:8,2-n_rnn:2-dataset:coccokit_3
    const checkpoints = results.filter((s) => s.dir).map((s) => {
      const checkpoint = s.name
        .split('-')
        .map((part) => part.split(':'))
        .filter((pair) => pair.length && pair[1])
        .reduce((acc, pair) => (acc[pair[0]] = pair[1]) && acc, {});
      checkpoint.name = checkpoint.name || checkpoint.dataset || checkpoint.exp;
      checkpoint.date = s.date;
      checkpoint.dir = s;
      checkpoint.persisted = false;
      const dataset = get_dataset(checkpoint.name, unsortedFolder, checkpoint.date);
      const loss = lossReport[checkpoint.name];
      if (loss) {
        dataset.epoch = checkpoint.epoch = loss.length;
        checkpoint.training_loss = loss;
      }
      dataset.checkpoints.push(checkpoint);
      return checkpoint;
    });

    // `output/` listing: generated audio named `<dataset>-ep<epoch>.*`.
    output.forEach((file) => {
      const pair = file.name.split('.')[0].split('-');
      const dataset = get_dataset(pair[0], unsortedFolder, file.date);
      file.persisted = false;
      // Guard pair[1]: a name without `-` would otherwise throw; radix added.
      file.epoch = parseInt(file.epoch || (pair[1] || '').replace(/^\D+/, ''), 10) || 0;
      dataset.epoch = Math.max(file.epoch, dataset.epoch || 0);
      dataset.output.push(file);
    });

    dispatch({
      type: types.samplernn.init,
      data: {
        folderLookup,
        folders,
        files,
        checkpoints,
        builtDatasets,
        output,
      },
    });

    if (id) {
      const folder = id === 'unsorted' ? folderLookup.unsorted : folderLookup[id];
      dispatch({
        type: types.samplernn.set_folder,
        folder,
      });
    }
  }).catch((e) => {
    console.error(e);
  });
};

/**
 * Thunk: run the samplernn `report` script and parse its stdout into a
 * per-dataset loss report. Blocks are separated by blank lines; the first
 * line of a block is the dataset name and each subsequent line is a set of
 * tab-separated `key: value` pairs. Dispatches `samplernn.load_loss` and
 * resolves with the parsed `{ [name]: Array<Object> }` report.
 *
 * @returns {Function} redux-thunk `(dispatch) => Promise<Object>`
 */
export const load_loss = () => (dispatch) => {
  return actions.socket.run_script({ module: 'samplernn', activity: 'report' })
    .then((report) => {
      const lossReport = {};
      report.stdout.split('\n\n').filter((block) => !!block).forEach((data) => {
        const [name, ...lines] = data.split('\n');
        lossReport[name] = lines.map((line) =>
          line.split('\t').reduce((acc, pair) => {
            const [key, value] = pair.split(': ');
            acc[key] = value;
            return acc;
          }, {})
        );
      });
      dispatch({ type: types.samplernn.load_loss, lossReport });
      return lossReport;
    });
};

/**
 * Action creator: select a folder in the samplernn view.
 *
 * BUGFIX: the original body `{ types.samplernn.set_folder, folder }` was a
 * statement block containing a no-op comma expression, so the creator
 * returned `undefined`. It now returns the intended action object, matching
 * the shape dispatched in `load_directories`.
 *
 * @param {Object} folder - folder record to select
 * @returns {{type: string, folder: Object}}
 */
export const set_folder = (folder) => ({
  type: types.samplernn.set_folder,
  folder,
});

/**
 * Thunk: start a preempting, watched `fetch` task that downloads `url` into
 * the samplernn module.
 *
 * @param {string} url - remote resource to fetch
 * @returns {Function} redux-thunk
 */
export const fetch_url = (url) => (dispatch) => {
  console.log(url);
  actions.task.start_task(
    { activity: 'fetch', module: 'samplernn', dataset: 'test', epochs: 1, opt: { url } },
    { preempt: true, watch: true }
  );
};