author     Jules Laplace <julescarbon@gmail.com>   2018-06-04 19:46:57 +0200
committer  Jules Laplace <julescarbon@gmail.com>   2018-06-04 19:46:57 +0200
commit     7c72a0d2ced5ce128364b4a1d17696ffa9c3f63c (patch)
tree       c6161bd295839e97b9885082a2fdf2361156be68  /public/bundle.js
parent     521b024439b202be03447188925869100904b807 (diff)
denormalize state tree
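The hunks below swap embedded file objects out of dataset.input / dataset.output and store file ids instead, resolving them through a fileLookup map that is now threaded into import_files and the results view. A minimal sketch of the lookup shape this appears to assume (the ids, names, and values here are invented for illustration, not taken from the repo):

    // Hypothetical state shape: datasets hold file ids, file objects live in fileLookup.
    var data = {
      datasetLookup: {
        piano: { name: 'piano', epoch: 12, input: ['file-1'], output: ['file-2'] }
      },
      fileLookup: {
        'file-1': { name: 'piano.wav', persisted: true, epoch: 0 },
        'file-2': { name: 'piano.ep12.wav', persisted: false, epoch: 12 }
      }
    };

    // Resolving ids back to file objects, as the updated import_files / SampleRNNResults code does:
    var outputs = data.datasetLookup.piano.output.map(function (id) {
      return data.fileLookup[id];
    });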
Diffstat (limited to 'public/bundle.js')
-rw-r--r--  public/bundle.js  40
1 file changed, 29 insertions(+), 11 deletions(-)
diff --git a/public/bundle.js b/public/bundle.js
index bd6159d..cc88193 100644
--- a/public/bundle.js
+++ b/public/bundle.js
@@ -4987,10 +4987,10 @@ var load_directories = exports.load_directories = function load_directories(id)
file.epoch = parseInt(file.epoch || pair[1].replace(/^\D+/, '')) || 0;
dataset.epoch = Math.max(file.epoch, dataset.epoch || 0);
// here check if the file exists in dataset, if so just check that it's persisted
- var found = dataset.output.some(function (f) {
+ var found = dataset.output.some(function (file_id) {
// if (f.name ===
- if (f.name === file.name) {
- f.persisted = true;
+ if (fileLookup[file_id].name === file.name) {
+ fileLookup[file_id].persisted = true;
return true;
}
return false;
@@ -5056,7 +5056,7 @@ var load_loss = exports.load_loss = function load_loss() {
};
};
-var import_files = exports.import_files = function import_files(state, datasetLookup) {
+var import_files = exports.import_files = function import_files(state, datasetLookup, fileLookup) {
return function (dispatch) {
var selected = state.selected,
folder = state.folder,
@@ -5071,7 +5071,9 @@ var import_files = exports.import_files = function import_files(state, datasetLo
case 'Hotlink':
// in this case, create a new file for each file we see.
promises = names.reduce(function (a, name) {
- return datasetLookup[name].output.map(function (file) {
+ return datasetLookup[name].output.map(function (id) {
+ return fileLookup[id];
+ }).map(function (file) {
var partz = file.name.split('.');
var ext = partz.pop();
return _actions2.default.file.create({
@@ -5094,8 +5096,9 @@ var import_files = exports.import_files = function import_files(state, datasetLo
break;
case 'Upload':
promises = names.reduce(function (a, name) {
- console.log(datasetLookup[name]);
- return datasetLookup[name].input.map(function (file) {
+ return datasetLookup[name].input.map(function (id) {
+ return fileLookup[id];
+ }).map(function (file) {
if (file.persisted) return null;
var partz = file.name.split('.');
var ext = partz.pop();
@@ -5658,7 +5661,7 @@ var SampleRNNImport = function (_Component) {
var samplernn = this.props.samplernn;
console.log(this.state);
- this.props.actions.import_files(this.state, samplernn.data.datasetLookup);
+ this.props.actions.import_files(this.state, samplernn.data.datasetLookup, samplernn.data.fileLookup);
}
}]);
@@ -5793,6 +5796,7 @@ var SampleRNNLoss = function (_Component) {
var ctx = canvas.getContext('2d');
var w = canvas.width = canvas.width * devicePixelRatio;
var h = canvas.height = canvas.height * devicePixelRatio;
+ ctx.clearRect(0, 0, w, h);
var keys = Object.keys(lossReport).sort().filter(function (k) {
return !!lossReport[k].length;
@@ -5891,6 +5895,13 @@ var SampleRNNLoss = function (_Component) {
}
});
ctx.stroke();
+ var i = loss.length - 1;
+ var v = parseFloat(loss[i].training_loss);
+ var x = (0, _util.lerp)((i - 2) / (epochsMax / epochsScaleFactor) * epochsScaleFactor, wmin, wmax);
+ var y = (0, _util.lerp)((0, _util.norm)(v, scaleMin, scaleMax), hmax, hmin);
+ var fontSize = 9;
+ ctx.font = 'italic ' + fontSize * devicePixelRatio + 'px "Georgia"';
+ ctx.fillText(key, x + fontSize, y + fontSize);
});
}
}]);
@@ -6263,7 +6274,10 @@ var SampleRNNResults = function (_Component) {
var _this3 = this;
if (this.props.samplernn.loading) return (0, _preact.h)(_loading2.default, { progress: this.props.samplernn.progress });
- var folderLookup = this.props.samplernn.data.folderLookup;
+ var _props$samplernn$data = this.props.samplernn.data,
+ folderLookup = _props$samplernn$data.folderLookup,
+ fileLookup = _props$samplernn$data.fileLookup,
+ datasetLookup = _props$samplernn$data.datasetLookup;
// const { folderLookup } = samplernn
var renders = Object.keys(folderLookup).sort(util.sort.stringSort.asc).map(function (key) {
@@ -6273,7 +6287,9 @@ var SampleRNNResults = function (_Component) {
mapFn = _util$sort$orderByFn.mapFn,
sortFn = _util$sort$orderByFn.sortFn;
- var datasetPairs = folder.datasets.map(mapFn).sort(sortFn);
+ var datasetPairs = folder.datasets.map(function (name) {
+ return datasetLookup[name];
+ }).map(mapFn).sort(sortFn);
var bestRenders = datasetPairs.map(function (pair) {
return pair[1];
}).filter(function (dataset) {
@@ -6281,7 +6297,9 @@ var SampleRNNResults = function (_Component) {
}).map(function (dataset) {
var output = dataset.output;
- return output.map(mapFn).sort(sortFn)[0][1];
+ return output.map(function (id) {
+ return fileLookup[id];
+ }).map(mapFn).sort(sortFn)[0][1];
});
// console.log(bestRenders.map(r => r.epoch))
var path = folder.name === 'unsorted' ? "/samplernn/import/" : "/samplernn/datasets/" + folder.id + "/";