1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
|
import uuidv1 from 'uuid/v1'
import socket from '../../socket'
import types from '../../types'
import * as datasetLoader from '../../dataset/dataset.loader'
import actions from '../../actions'
import { allProgress } from '../../util'
/**
 * Thunk action creator: load the samplernn dataset index and task list,
 * reporting load progress, then dispatch the combined dataset state.
 *
 * @param {string} [id] - optional folder id to select after the load completes
 * @returns {Function} redux-thunk function receiving `dispatch`
 */
export const load_directories = (id) => (dispatch) => {
  const module = 'samplernn'
  allProgress([
    datasetLoader.load(module),
    actions.task.index({ module }),
  ], (percent, i, n) => {
    // Surface incremental progress to the app-level loading indicator.
    dispatch({ type: types.app.load_progress, progress: { i, n } })
  }).then(res => {
    // Only the dataset API report is consumed here; the task index result
    // (second element) is loaded for its side effects elsewhere.
    const [datasetApiReport] = res
    const {
      folderLookup,
      fileLookup,
      datasetLookup,
      folders,
      files,
    } = datasetApiReport
    // FIX: `checkpoints` and `output` were referenced in the dispatch payload
    // below but never defined — the code that computed them was commented out,
    // so this handler threw a ReferenceError on every load (swallowed by the
    // .catch, so the dataset state was never dispatched). Default to empty
    // arrays to keep the payload shape the reducer expects until checkpoint /
    // output scanning is reinstated (see version control history for the
    // previous results/output parsing implementation).
    const checkpoints = []
    const output = []
    dispatch({
      type: types.dataset.load,
      data: {
        module,
        folderLookup,
        fileLookup,
        datasetLookup,
        folders, files,
        checkpoints,
        output,
      },
    })
    if (id) {
      console.log('folder id', id)
      // NOTE(review): this is the samplernn loader but it dispatches a
      // pix2pix action type — confirm the cross-module type is intentional
      // and not a copy-paste from the pix2pix actions file.
      dispatch({
        type: types.pix2pix.set_folder,
        folder_id: id,
      })
    }
  }).catch(e => {
    // Best-effort load: log and leave existing state untouched.
    console.error(e)
  })
}
/**
 * Action creator: select the active folder.
 *
 * FIX: the original arrow used a block body — `{ types.pix2pix.set_folder, folder }`
 * is parsed as a comma-expression statement, so the function returned `undefined`
 * and the action's `type` key was missing. Wrap the object literal in parentheses
 * and spell out the `type:` key.
 *
 * @param {*} folder - the folder (id or object) to select
 * @returns {{type: string, folder: *}} redux action
 */
export const set_folder = (folder) => ({ type: types.pix2pix.set_folder, folder })
|