import uuidv1 from 'uuid/v1'
import socket from '../../socket'
import types from '../../types'
import actions from '../../actions'
import { allProgress } from '../../util'

export const load_directories = (id) => (dispatch) => {
  dispatch({ type: types.app.load_progress, progress: { i: 0, n: 7 }})

  allProgress([
    actions.folder.index({ module: 'samplernn' }),
    actions.file.index({ module: 'samplernn' }),
    actions.task.index({ module: 'samplernn' }),
    actions.socket.list_directory({ module: 'samplernn', dir: 'datasets' }),
    actions.socket.list_directory({ module: 'samplernn', dir: 'results' }),
    actions.socket.list_directory({ module: 'samplernn', dir: 'output' }),
    load_loss()(dispatch),
  ], (percent, i, n) => {
    dispatch({ type: types.app.load_progress, progress: { i, n }})
  }).then(res => {
    const [folders, files, tasks, datasets, results, output, lossReport] = res

    const unsortedFolder = {
      id: 0,
      name: 'unsorted',
      files: [],
      datasets: [],
    }

    let datasetLookup = {}
    let folderLookup = {}
    let fileLookup = {}
    let taskLookup = {}

    // create an empty dataset entry and register it with its folder
    const empty_dataset = (name, folder = unsortedFolder) => {
      const dataset = {
        name,
        input: [],
        checkpoints: [],
        output: [],
      }
      datasetLookup[name] = dataset
      folder.datasets.push(name)
      return dataset
    }

    // find or create the dataset, keeping track of its most recent date
    const get_dataset = (name, folder = unsortedFolder, date) => {
      const dataset = datasetLookup[name] || empty_dataset(name, folder)
      if (date) {
        dataset.date = (dataset.date && !isNaN(dataset.date))
          ? Math.max(+new Date(date), dataset.date)
          : +new Date(date)
      }
      return dataset
    }

    // take all of the folders and put them in a lookup
    folderLookup = folders.reduce((folderLookup, folder) => {
      folderLookup[folder.id] = {
        id: folder.id,
        name: folder.name,
        folder,
        files: [],
        datasets: [],
      }
      return folderLookup
    }, { unsorted: unsortedFolder })

    // prepare the files by splitting them into two groups
    const generatedFiles = files.filter(file => file.generated)
    const ungeneratedFiles = files.filter(file => !file.generated)

    // build the initial dataset lookup table using the ungenerated files
    ungeneratedFiles.forEach(file => {
      fileLookup[file.id] = file
      if (!file.name) {
        file.name = (file.opt || {}).token || file.url
      }
      const name = (file.name || 'unsorted').split('.')[0]
      const folder = folderLookup[file.folder_id] || unsortedFolder
      const dataset = get_dataset(name, folder, file.date || file.created_at)
      // loose check: the file counts as persisted if its url contains its name
      if (file.url.match(file.name)) file.persisted = true
      dataset.input.push(file.id)
      folder.files.push(file.id)
    })

    // go over the generated files and add additional datasets
    // (in case their source files were deleted)
    generatedFiles.forEach(file => {
      fileLookup[file.id] = file
      const pair = file.name.split('.')[0].split('-')
      const folder = folderLookup[file.folder_id] || unsortedFolder
      const dataset = get_dataset(pair[0], folder, file.date || file.created_at)
      dataset.output.push(file.id)
      folder.files.push(file.id)
      file.epoch = file.epoch || pair[1]
    })
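
    // The server-side layout, as assumed by this loader (inferred from the
    // list_directory calls above, not documented elsewhere): `datasets/`
    // holds flat audio files plus one directory per built dataset,
    // `results/` holds one checkpoint directory per experiment, and
    // `output/` holds generated audio named `<dataset>-<epoch>.<ext>`,
    // e.g. a hypothetical `coccokit_3-42.wav` would belong to dataset
    // `coccokit_3` at epoch 42.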
    // also show the various flat audio files we have, in the input area
    const flatDatasets = datasets.filter(s => s.name.match(/(wav|aiff?|flac|mp3)$/) && !s.dir)
    const builtDatasets = datasets.filter(s => s.dir)

    builtDatasets.forEach(dir => {
      const dataset = get_dataset(dir.name)
      dataset.isBuilt = true
    })

    flatDatasets.forEach(file => {
      file.uuid = uuidv1()
      fileLookup[file.uuid] = file
      const name = file.name.split('.')[0]
      const dataset = get_dataset(name, unsortedFolder, file.date)
      file.persisted = false
      dataset.input.push(file.uuid)
    })

    // checkpoint directory names encode their options, e.g.
    // exp:coccokit_3-frame_sizes:8,2-n_rnn:2-dataset:coccokit_3
    const checkpoints = results.filter(s => s.dir).map(s => {
      const checkpoint = s.name
        .split('-')
        .map(part => part.split(':'))
        .filter(pair => pair.length && pair[1])
        .reduce((a, [key, value]) => {
          a[key] = value
          return a
        }, {})
      checkpoint.name = checkpoint.name || checkpoint.dataset || checkpoint.exp
      checkpoint.date = s.date
      checkpoint.dir = s
      checkpoint.persisted = false
      const dataset = get_dataset(checkpoint.name, unsortedFolder, checkpoint.date)
      const loss = lossReport[checkpoint.name]
      if (loss) {
        dataset.epoch = checkpoint.epoch = loss.length
        checkpoint.training_loss = loss
      }
      dataset.checkpoints.push(checkpoint)
      return checkpoint
    })

    output.forEach(file => {
      file.uuid = uuidv1()
      fileLookup[file.uuid] = file
      const pair = file.name.split('.')[0].split('-')
      const dataset = get_dataset(pair[0], unsortedFolder, file.date)
      file.persisted = false
      file.epoch = parseInt(file.epoch || (pair[1] || '').replace(/^\D+/, ''), 10) || 0
      dataset.epoch = Math.max(file.epoch, dataset.epoch || 0)
      // if the file already exists in the dataset, just mark it as persisted
      const found = dataset.output.some(file_id => {
        if (fileLookup[file_id].name === file.name) {
          fileLookup[file_id].persisted = true
          return true
        }
        return false
      })
      if (!found) {
        dataset.output.push(file.uuid)
      }
    })

    dispatch({
      type: types.samplernn.init,
      data: {
        folderLookup,
        fileLookup,
        datasetLookup,
        taskLookup,
        folders,
        files,
        checkpoints,
        output,
      },
    })

    if (id) {
      dispatch({
        type: types.samplernn.set_folder,
        folder_id: id,
      })
    }
  }).catch(e => {
    console.error(e)
  })
}

export const load_loss = () => (dispatch) => {
  return actions.socket.run_script({ module: 'samplernn', activity: 'report' })
    .then(report => {
      // stdout is a series of blank-line-separated blocks: the first line of
      // each block is the dataset name, and every following line holds
      // tab-separated `key: value` pairs for one epoch
      const lossReport = {}
      report.stdout.split('\n\n').filter(a => !!a).forEach(data => {
        const [name, ...lines] = data.split('\n')
        lossReport[name] = lines.map(line =>
          line.split('\t').reduce((a, s) => {
            const [key, value] = s.split(': ')
            a[key] = value
            return a
          }, {})
        )
      })
      dispatch({ type: types.samplernn.load_loss, lossReport })
      return lossReport
    })
}

export const import_files = (state, datasetLookup, fileLookup) => (dispatch) => {
  const { selected, folder, url_base, import_action } = state
  const names = Object.keys(selected).filter(k => selected[k])
  let promises = []
  switch (import_action) {
    case 'Hotlink':
      // in this case, create a new file record for each selected file
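      // Hotlink registers each generated output file as a `file` record
      // pointing at its existing URL (url_base + file name), so no audio is
      // copied; the Upload case below pushes not-yet-persisted input files
      // to the server's datasets directory instead.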
      promises = names.reduce((a, name) => {
        return datasetLookup[name].output.map(id => fileLookup[id]).map(file => {
          const parts = file.name.split('.')
          const ext = parts.pop()
          return actions.file.create({
            folder_id: folder,
            name: file.name,
            url: url_base + file.name,
            mime: 'audio/' + ext,
            epoch: file.epoch,
            size: file.size,
            module: 'samplernn',
            dataset: name,
            activity: 'train',
            datatype: 'audio',
            generated: true,
            created_at: new Date(file.date),
            updated_at: new Date(file.date),
          })
        }).concat(a)
      }, [])
      break
    case 'Upload':
      promises = names.reduce((a, name) => {
        return datasetLookup[name].input.map(id => fileLookup[id]).map(file => {
          if (file.persisted) return null
          const parts = file.name.split('.')
          const ext = parts.pop()
          // wav and flac files are skipped here (the nulls are filtered out below)
          if (ext === 'wav' || ext === 'flac') return null
          return actions.socket.upload_file({
            folder_id: folder,
            module: 'samplernn',
            activity: 'train',
            path: 'datasets',
            filename: file.name,
            generated: false,
            processed: false,
            datatype: 'audio',
            ttl: 60000,
          })
        }).concat(a)
      }, []).filter(a => !!a)
      break
    default:
      break
  }
  return Promise.all(promises).catch(e => {
    console.error(e)
  })
}

export const set_folder = (folder_id) => ({
  type: types.samplernn.set_folder,
  folder_id,
})

export const fetch_url = (url) => (dispatch) => {
  return actions.queue.add_task({
    activity: 'fetch',
    module: 'samplernn',
    dataset: 'test',
    epochs: 1,
    opt: { url },
  }, { preempt: true, watch: true })
}

export const train_task_now = (dataset, epochs = 1) => (dispatch) => {
  const task = {
    module: 'samplernn',
    activity: 'train',
    dataset,
    epochs,
    opt: {
      sample_length: 44100 * 5, // five seconds at 44.1kHz
      n_samples: 6,
      keep_old_checkpoints: false,
    },
  }
  return actions.queue.add_task(task)
}
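
// Example usage (a sketch, not part of this module: assumes a redux store
// configured with thunk middleware, plus a `folderId` supplied by the UI
// and an import path matching your project layout, all hypothetical here):
//
//   import { load_directories, train_task_now } from './samplernn'
//   store.dispatch(load_directories(folderId))
//   store.dispatch(train_task_now('coccokit_3', 10))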