author     Jules Laplace <julescarbon@gmail.com>    2018-06-04 02:54:44 +0200
committer  Jules Laplace <julescarbon@gmail.com>    2018-06-04 02:54:44 +0200
commit     2f22fd5e4a558ed9b2379565be88b9d1e1b9b7c5 (patch)
tree       4a66cae0cc816bf01949b3076fbb082a460d47cb /app
parent     9871cd35c31e77fac9ed484f80783e3267573016 (diff)
instructing server to curl files into the database/s3
Diffstat (limited to 'app')
-rw-r--r--  app/client/api/crud.upload.js                       |  2
-rw-r--r--  app/client/modules/samplernn/samplernn.actions.js   | 30
-rw-r--r--  app/client/socket/socket.actions.js                 |  4
-rw-r--r--  app/relay/remote.js                                 | 12
-rw-r--r--  app/relay/runner.js                                 | 29
-rw-r--r--  app/server/bridge.js                                |  2
-rw-r--r--  app/server/proxy.js                                 |  4
-rw-r--r--  app/server/site.js                                  | 10
8 files changed, 78 insertions, 15 deletions
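
At a glance, the commit wires one upload path end to end: the samplernn import flow on the client issues an upload_file syscall over the socket, the relay turns that payload into a curl multipart POST against the server's new /api/folder/:id/upload/ route, and the server puts the file into S3 and records it in the database. A minimal sketch of the client-side call, with field names taken from the samplernn.actions.js hunk below (the folder id and filename are placeholder values):

    // hypothetical call into the socket action added by this commit
    actions.socket.upload_file({
      folder_id: 1,              // placeholder folder id
      module: 'samplernn',
      activity: 'train',
      path: 'datasets',
      filename: 'piano.wav',     // placeholder filename
      generated: false,
      processed: false,
      datatype: 'audio',
      ttl: 60000,                // overrides the default 10s syscall timeout
    }).then(data => console.log(data))  // mirrors the Promise.all handling in import_files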
diff --git a/app/client/api/crud.upload.js b/app/client/api/crud.upload.js
index 01c3e18..29216df 100644
--- a/app/client/api/crud.upload.js
+++ b/app/client/api/crud.upload.js
@@ -15,7 +15,7 @@ export function crud_upload(type, fd, data, dispatch) {
xhr.addEventListener("load", uploadComplete, false)
xhr.addEventListener("error", uploadFailed, false)
xhr.addEventListener("abort", uploadCancelled, false)
- xhr.open("POST", '/' + type + '/' + id + '/upload/')
+ xhr.open("POST", '/api/' + type + '/' + id + '/upload/')
xhr.send(fd)
dispatch && dispatch({ type: as_type(type, 'upload_loading')})
diff --git a/app/client/modules/samplernn/samplernn.actions.js b/app/client/modules/samplernn/samplernn.actions.js
index e6fd5b5..30620a7 100644
--- a/app/client/modules/samplernn/samplernn.actions.js
+++ b/app/client/modules/samplernn/samplernn.actions.js
@@ -28,7 +28,7 @@ export const load_directories = (id) => (dispatch) => {
const get_dataset = (name, folder=unsortedFolder, date) => {
const dataset = datasetLookup[name] || empty_dataset(name, folder)
if (date) {
- dataset.date = dataset.date ? Math.max(+new Date(date), dataset.date) : +new Date(date)
+ dataset.date = (dataset.date && ! isNaN(dataset.date)) ? Math.max(+new Date(date), dataset.date) : +new Date(date)
}
return dataset
}
@@ -44,6 +44,7 @@ export const load_directories = (id) => (dispatch) => {
folder.datasets.push(dataset)
return dataset
}
+
// take all of the folders and put them in a lookup
const folderLookup = folders.reduce((folderLookup, folder) => {
folderLookup[folder.id] = { id: folder.id, name: folder.name, folder, datasets: [] }
@@ -63,7 +64,8 @@ export const load_directories = (id) => (dispatch) => {
file.name = (file.opt || {}).token || file.url
}
const name = (file.name || 'unsorted').split('.')[0]
- const dataset = get_dataset(name, folderLookup[file.folder_id], unsortedFolder, file.date)
+ const dataset = get_dataset(name, folderLookup[file.folder_id] || unsortedFolder, file.date || file.created_at)
+ if (file.url.match(file.name)) file.persisted = true
dataset.input.push(file)
return datasetLookup
}, datasetLookup)
@@ -71,7 +73,7 @@ export const load_directories = (id) => (dispatch) => {
// go over the generated files and add addl datasets (if the files were deleted)
generatedFiles.map(file => {
const pair = file.name.split('.')[0].split('-')
- const dataset = get_dataset(pair[0], folderLookup[file.folder_id], unsortedFolder, file.date)
+ const dataset = get_dataset(pair[0], folderLookup[file.folder_id] || unsortedFolder, file.date || file.created_at)
dataset.output.push(file)
file.epoch = file.epoch || pair[1]
})
@@ -210,10 +212,32 @@ export const import_files = (state, datasetLookup) => (dispatch) => {
}, [])
break
case 'Upload':
+ promises = names.reduce((a,name) => {
+ console.log(datasetLookup[name])
+ return datasetLookup[name].input.map(file => {
+ if (file.persisted) return null
+ const partz = file.name.split('.')
+ const ext = partz.pop()
+ if (ext === 'wav' || ext === 'flac') return
+ console.log(file)
+ return actions.socket.upload_file({
+ folder_id: folder,
+ module: 'samplernn',
+ activity: 'train',
+ path: 'datasets',
+ filename: file.name,
+ generated: false,
+ processed: false,
+ datatype: 'audio',
+ ttl: 60000,
+ })
+ }).concat(a)
+ }, []).filter(a => !! a)
break
default:
break
}
+ console.log(promises)
return Promise.all(promises).then(data => {
console.log(data)
}).catch(e => {
diff --git a/app/client/socket/socket.actions.js b/app/client/socket/socket.actions.js
index e787f1a..ffe1cfe 100644
--- a/app/client/socket/socket.actions.js
+++ b/app/client/socket/socket.actions.js
@@ -10,7 +10,11 @@ export function list_directory(opt) {
export function run_script(opt) {
return syscall_async('run_script', opt)
}
+export function upload_file(opt) {
+ return syscall_async('upload_file', opt)
+}
export const syscall_async = (tag, payload, ttl=10000) => {
+ ttl = payload.ttl || ttl
return new Promise( (resolve, reject) => {
const uuid = uuidv1()
const timeout = setTimeout(() => {
diff --git a/app/relay/remote.js b/app/relay/remote.js
index 4da9200..252258f 100644
--- a/app/relay/remote.js
+++ b/app/relay/remote.js
@@ -87,7 +87,7 @@ remote.on('system', (data) => {
})
break
case 'list_directory':
- runner.list_directory(data.payload, (files) => {
+ runner.list_directory(data.payload, files => {
remote.emit('system_res', {
type: 'list_directory',
dir: data.payload,
@@ -96,6 +96,16 @@ remote.on('system', (data) => {
})
})
break
+ case 'upload_file':
+ runner.upload_file(data.payload, (error, stdout, stderr) => {
+ remote.emit('system_res', {
+ type: 'upload_file',
+ query: data.payload,
+ uuid: data.uuid,
+ stdout,
+ })
+ })
+ break
case 'get_status':
remote.emit('system_res', {
type: 'relay_status',
diff --git a/app/relay/runner.js b/app/relay/runner.js
index 741ef8a..4762045 100644
--- a/app/relay/runner.js
+++ b/app/relay/runner.js
@@ -56,6 +56,30 @@ function clear_task(is_gpu, task){
}
}
+function sanitize_path(f){
+ return f.replace(/^\//,'').replace(/\.\./, '')
+}
+
+export function upload_file(task, cb) {
+ const module = modules[task.module]
+ const filepath = path.join(module.cwd, sanitize_path(task.path), sanitize_path(task.filename))
+ const params = [
+ '-F', 'module=' + task.module,
+ '-F', 'activity=' + task.activity,
+ '-F', 'generated=' + (String(task.generated) === 'true'),
+ '-F', 'processed=' + (String(task.processed) === 'true'),
+ '-F', "file=@" + filepath,
+ process.env.API_REMOTE + '/api/folder/' + task.folder_id + '/upload/',
+ ]
+ console.log(params)
+ execFile('curl', params, cb)
+ // curl \
+ // -F "module=samplernn" \
+ // -F "activity=train" \
+ // -F "file=@woods1.jpg" \
+ // localhost:7013/api/folder/1/upload/
+}
+
export function status () {
return {
cpu: serialize_task(state.current_cpu_task),
@@ -143,7 +167,8 @@ export function run_script(task, cb) {
cb("")
}
const module = modules[task.module]
- const { activity, interpreter, params } = build_params(module, task)
+ const activity = module.activities[task.activity]
+ const { interpreter, params } = build_params(module, activity, task)
if (! interpreter) return { type: 'error', error: "No such interpreter: " + activity.interpreter }
if (! activity.isScript) return { type: 'error', error: "Not a script: " + task.module }
@@ -163,7 +188,7 @@ export function run_task(task, preempt, watch){
}
export function run_task_with_activity(task, module, activity, preempt, watch) {
- const { interpreter, params } = build_params(module, task)
+ const { interpreter, params } = build_params(module, activity, task)
if (! interpreter) return { type: 'error', error: "No such interpreter: " + activity.interpreter }
if (interpreter.gpu && state.current_gpu_task.status !== 'IDLE') {
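
For reference, a rough standalone equivalent of the curl request the relay's new upload_file shells out to via execFile (form fields and route taken from the hunk above; the host and file path are assumptions, following the example in the code comment):

    // sketch of the multipart POST issued by the relay
    const { execFile } = require('child_process')

    const params = [
      '-F', 'module=samplernn',
      '-F', 'activity=train',
      '-F', 'generated=false',
      '-F', 'processed=false',
      '-F', 'file=@datasets/piano.wav',               // placeholder file path
      'http://localhost:7013/api/folder/1/upload/',   // API_REMOTE + route, host assumed
    ]

    execFile('curl', params, (error, stdout, stderr) => {
      if (error) return console.error(stderr)
      console.log(stdout)  // whatever the upload route responds with
    })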
diff --git a/app/server/bridge.js b/app/server/bridge.js
index 776ec31..bbe0e26 100644
--- a/app/server/bridge.js
+++ b/app/server/bridge.js
@@ -91,7 +91,7 @@ function bind_client(socket){
if (process.env.CACHE_SYSCALLS) {
const id = make_client_id(data)
console.log('client', id)
- console.log(id in syscall_lookup)
+ // console.log(id in syscall_lookup)
if (id in syscall_lookup) {
const cached = syscall_lookup[id]
syscall_lookup[id].uuid = data.uuid
diff --git a/app/server/proxy.js b/app/server/proxy.js
index 2dcc1d7..7526f01 100644
--- a/app/server/proxy.js
+++ b/app/server/proxy.js
@@ -69,14 +69,14 @@ function serve(req, res) {
}
if (DEBUG) {
- server_res.on('data', s => console.log(s.toString()))
+ // server_res.on('data', s => console.log(s.toString()))
}
server_res.resume()
})
connector.on('error', e => console.error(e))
if (DEBUG) {
- req.on('data', s => console.log('>>', s.toString()))
+ // req.on('data', s => console.log('>>', s.toString()))
}
req.on('error', s => { console.log('/!\\ ERROR /!\\'); console.log(s) })
req.pipe(connector)
diff --git a/app/server/site.js b/app/server/site.js
index 23e2cb1..aa7087c 100644
--- a/app/server/site.js
+++ b/app/server/site.js
@@ -28,7 +28,7 @@ const api_tasks = api(app, 'task')
upload.init()
// app.use('/upload', require('./upload'))
-app.post('/folder/:id/upload/',
+app.post('/api/folder/:id/upload/',
multer.array('file'),
function (req, res, next){
if (! req.files || ! req.files.length) {
@@ -40,7 +40,7 @@ app.post('/folder/:id/upload/',
var folder_id = req.params.id
var dirname = process.env.S3_PATH + '/data/' + folder_id + '/'
var promises = req.files.map((file) => {
- console.log(file)
+ // console.log(file)
return new Promise( (resolve, reject) => {
upload.put({
file: file,
@@ -60,9 +60,9 @@ app.post('/folder/:id/upload/',
datatype: file.mimetype.split('/')[0],
activity: req.body.activity || 'url',
module: req.body.module,
- epoch: 0,
- generated: false,
- processed: false,
+ epoch: parseInt(req.body.epoch) || 0,
+ generated: req.body.generated === 'true',
+ processed: req.body.processed === 'true',
// username: req.user.get('username'),
}