import uuidv1 from 'uuid/v1'
// NOTE(review): `socket` is not referenced in this file's visible code — kept in
// case the import has side effects or is used by a chunk outside this view.
import socket from '../../socket'
import types from '../../types'
import * as datasetLoader from '../../dataset/dataset.loader'
import actions from '../../actions'
import util from '../../util'
import samplernnModule from './samplernn.module'

/**
 * Thunk: load everything the samplernn dataset view needs — the dataset API
 * report, the `datasets`/`results`/`output` directory listings, disk usage,
 * and the per-dataset training-loss report — then dispatch one
 * `types.dataset.load` action with the merged lookup tables.
 *
 * Progress of the six parallel requests is reported via
 * `types.app.load_progress` as each one settles.
 *
 * @param {string} [id] - optional folder id; when given, a
 *   `types.dataset.set_folder` action is dispatched immediately (it does not
 *   wait for the listings above).
 * @returns {Function} redux-thunk receiving `dispatch`.
 */
export const load_directories = (id) => (dispatch) => {
  const module = samplernnModule.name
  util.allProgress([
    datasetLoader.load(module),
    actions.socket.list_directory({ module, dir: 'datasets' }),
    actions.socket.list_directory({ module, dir: 'results' }),
    actions.socket.list_directory({ module, dir: 'output' }),
    actions.socket.disk_usage({ module, dir: 'datasets' }),
    load_loss()(dispatch),
  ], (percent, i, n) => {
    dispatch({
      type: types.app.load_progress,
      progress: { i, n },
      data: { module: 'samplernn' },
    })
  }).then(res => {
    const [datasetApiReport, datasets, results, output, datasetUsage, lossReport] = res
    let {
      folderLookup,
      fileLookup,
      datasetLookup,
      folders,
      files,
      unsortedFolder,
    } = datasetApiReport
    console.log(datasetUsage)

    // also show the various flat audio files we have, in the input area..
    // NOTE(review): the regex has no `\.` anchor, so a name like "foowav"
    // (no extension dot) also matches — confirm whether that is intended.
    const flatDatasets = datasets.filter(s => s.name.match(/(wav|aiff?|flac|mp3)$/) && !s.dir)
    const builtDatasets = datasets.filter(s => s.dir)

    // A directory under `datasets` means the dataset was already built on disk.
    builtDatasets.forEach(dir => {
      const dataset = datasetLoader.getDataset(module, datasetLookup, dir.name, unsortedFolder, dir.date)
      dataset.isBuilt = true
    })

    // Register each loose audio file as an (unpersisted) input of the dataset
    // named by its basename, assigning it a client-side uuid for lookup.
    flatDatasets.forEach(file => {
      file.uuid = uuidv1()
      fileLookup[file.uuid] = file
      const name = file.name.split('.')[0]
      const dataset = datasetLoader.getDataset(module, datasetLookup, name, unsortedFolder, file.date)
      file.persisted = false
      dataset.input.push(file.uuid)
    })

    // Checkpoint directory names encode key:value pairs joined by '-', e.g.
    // exp:coccokit_3-frame_sizes:8,2-n_rnn:2-dataset:coccokit_3
    const checkpoints = results.filter(s => s.dir).map(s => {
      const checkpoint = s.name
        .split('-')
        .map(part => part.split(':'))         // renamed from `s` — was shadowing the outer `s`
        .filter(b => b.length && b[1])
        .reduce((a, b) => (a[b[0]] = b[1]) && a, {})
      checkpoint.name = checkpoint.name || checkpoint.dataset || checkpoint.exp
      if (!checkpoint.name) return            // unnamed entries are dropped by the filter below
      checkpoint.date = s.date
      checkpoint.dir = s
      checkpoint.persisted = false
      const dataset = datasetLoader.getDataset(module, datasetLookup, checkpoint.name, unsortedFolder, checkpoint.date)
      const loss = lossReport[checkpoint.name]
      if (loss) {
        // one loss entry per epoch, so the count is the current epoch
        dataset.epoch = checkpoint.epoch = loss.length
        checkpoint.training_loss = loss
      }
      dataset.checkpoints.push(checkpoint)
      return checkpoint
    }).filter(c => !!c)

    // Side effects only — was `output.map(...)` with the result discarded.
    output.forEach(file => {
      file.uuid = uuidv1()
      fileLookup[file.uuid] = file
      // output names look like "<dataset>-<epoch-ish>.<ext>"
      const pair = file.name.split('.')[0].split('-')
      const dataset = datasetLoader.getDataset(module, datasetLookup, pair[0], unsortedFolder, file.date)
      file.persisted = false
      // FIX: radix 10 was missing; `pair[1]` is undefined for names without a
      // '-' and used to throw — now such files fall back to epoch 0.
      file.epoch = parseInt(file.epoch || (pair[1] || '').replace(/^\D+/, ''), 10) || 0
      dataset.epoch = Math.max(file.epoch, dataset.epoch || 0)
      // here check if the file exists in dataset, if so just check that it's persisted
      const found = dataset.output.some(file_id => {
        if (fileLookup[file_id].name === file.name) {
          fileLookup[file_id].persisted = true
          return true
        }
        return false
      })
      if (!found) {
        dataset.output.push(file.uuid)
      }
    })

    dispatch({
      type: types.dataset.load,
      data: {
        module,
        folderLookup,
        fileLookup,
        datasetLookup,
        folders,
        files,
        checkpoints,
        output,
      },
    })
  }).catch(e => {
    console.error(e)
  })

  if (id) {
    dispatch({
      type: types.dataset.set_folder,
      data: { folder_id: id, module },
    })
  }
}

/**
 * Thunk: fetch the loss report and the `results` directory listing in
 * parallel (reporting progress), then dispatch `types.samplernn.load_graph`
 * with both so the training graph can render.
 *
 * @returns {Function} redux-thunk receiving `dispatch`.
 */
export const load_graph = () => dispatch => {
  const module = samplernnModule.name
  util.allProgress([
    load_loss()(dispatch),
    actions.socket.list_directory({ module, dir: 'results' }),
  ], (percent, i, n) => {
    dispatch({
      type: types.app.load_progress,
      progress: { i, n },
      data: { module: 'samplernn' },
    })
  }).then(res => {
    const [lossReport, results] = res
    dispatch({
      type: types.samplernn.load_graph,
      lossReport,
      results,
    })
  })
}

/**
 * Thunk: run the server-side samplernn "report" script and parse its stdout
 * into `{ [datasetName]: Array<{[key]: value}> }` — stanzas are separated by
 * blank lines; the first line of a stanza is the dataset name and each
 * following line holds tab-separated `key: value` pairs (one entry per epoch).
 *
 * Dispatches `types.samplernn.load_loss` and resolves with the parsed report
 * so callers (load_directories / load_graph) can also consume it.
 *
 * @returns {Function} redux-thunk receiving `dispatch`, returning a Promise
 *   that resolves to the loss report object.
 */
export const load_loss = () => dispatch => {
  return actions.socket.run_script({ module: 'samplernn', activity: 'report' })
    .then(report => {
      const lossReport = {}
      report.stdout.split('\n\n').filter(a => !!a).forEach(data => {
        const [name, ...lines] = data.split('\n')
        lossReport[name] = lines
          .map(line => line.split('\t').reduce((a, field) => {
            const b = field.split(': ')
            a[b[0]] = b[1]
            return a
          }, {}))
      })
      dispatch({
        type: types.samplernn.load_loss,
        lossReport,
      })
      return lossReport
    })
}

/**
 * Thunk: import the selected datasets' files into the app, either by
 * hotlinking generated output files (creating file records pointing at
 * `url_base`) or by uploading input files to the server's `datasets` path.
 * Redirects to the dataset folder page when every request has settled.
 *
 * @param {Object} state - `{ selected, folder_id, url_base, import_action }`;
 *   `selected` maps dataset name -> truthy when chosen, `import_action` is
 *   'Hotlink' or 'Upload'.
 * @param {Object} datasetLookup - dataset name -> dataset record (with
 *   `input`/`output` uuid arrays).
 * @param {Object} fileLookup - uuid -> file record.
 * @returns {Function} redux-thunk receiving `dispatch`, returning a Promise.
 */
export const import_files = (state, datasetLookup, fileLookup) => (dispatch) => {
  const { selected, folder_id, url_base, import_action } = state
  const names = Object.keys(selected).filter(k => selected[k])
  let promises
  switch (import_action) {
    case 'Hotlink':
      // in this case, create a new file for each file we see.
      promises = names.reduce((a, name) => {
        return datasetLookup[name].output.map(id => fileLookup[id]).map(file => {
          const partz = file.name.split('.')
          const ext = partz.pop()
          return actions.file.create({
            folder_id: folder_id,
            name: file.name,
            url: url_base + file.name,
            mime: 'audio/' + ext,
            epoch: file.epoch,
            size: file.size,
            module: 'samplernn',
            dataset: name,
            activity: 'train',
            datatype: 'audio',
            generated: true,
            created_at: new Date(file.date),
            updated_at: new Date(file.date),
          })
        }).concat(a)
      }, [])
      break
    case 'Upload':
      promises = names.reduce((a, name) => {
        return datasetLookup[name].input.map(id => fileLookup[id]).map(file => {
          if (file.persisted) return null
          const partz = file.name.split('.')
          const ext = partz.pop()
          // NOTE(review): wav/flac inputs are skipped (only logged) —
          // presumably they're handled elsewhere or already server-side;
          // confirm this isn't inverted.
          if (ext === 'wav' || ext === 'flac') return console.log(file)
          return actions.socket.upload_file({
            folder_id: folder_id,
            module: 'samplernn',
            activity: 'train',
            path: 'datasets',
            filename: file.name,
            generated: false,
            processed: false,
            datatype: 'audio',
            ttl: 60000,
          })
        }).concat(a)
      }, []).filter(a => !!a)
      break
    default:
      break
  }
  console.log(promises)
  return Promise.all(promises).then(data => {
    console.log(data)
    window.location.href = '/samplernn/datasets/' + folder_id + '/'
  }).catch(e => {
    console.error(e)
  })
}