| -rw-r--r-- | app/api/crud.js | 98 |
| -rw-r--r-- | app/api/index.js | 20 |
| -rw-r--r-- | app/client/modules/module.reducer.js | 7 |
| -rw-r--r-- | app/client/modules/samplernn/samplernn.reducer.js | 32 |
| -rw-r--r-- | app/server/bridge.js | 85 |
| -rw-r--r-- | app/server/db/bookshelf.js | 24 |
| -rw-r--r-- | app/server/db/crud.js | 47 |
| -rw-r--r-- | app/server/db/index.js | 5 |
| -rw-r--r-- | app/server/db/loader.js | 101 |
| -rw-r--r-- | app/server/db/model.js | 157 |
| -rw-r--r-- | app/server/db/models.js | 53 |
| -rw-r--r-- | app/server/site.js | 94 |
| -rw-r--r-- | app/server/util/api.js | 100 |
| -rw-r--r-- | app/server/util/upload.js | 80 |
14 files changed, 903 insertions, 0 deletions
diff --git a/app/api/crud.js b/app/api/crud.js
new file mode 100644
index 0000000..1d7b305
--- /dev/null
+++ b/app/api/crud.js
@@ -0,0 +1,98 @@

import fetch from 'node-fetch'

// Generic REST client factory: crud('folder') talks to /folders/, /folders/:id, etc.
export function crud(type_s, tag) {
  const type = '/' + type_s + 's/' + (tag || '')
  return {
    index: (data) => {
      return fetch(_get_url(type, data), _get_headers())
        .then(req => req.json())
        .catch(error)
    },

    show: (id) => {
      return fetch(type + id)
        .then(req => req.json())
        .catch(error)
    },

    create: (data) => {
      return fetch(type, post(data))
        .then(req => req.json())
        .catch(error)
    },

    update: (data) => {
      return fetch(type + data.id, put(data))
        .then(req => req.json())
        .catch(error)
    },

    destroy: (data) => {
      return fetch(type + data.id, destroy(data))
        .then(req => req.json())
        .catch(error)
    },

    // Re-scope the client under a sub-path, e.g. crud('file').under('recent')
    under: function(tag){
      return crud(type_s, tag)
    },
  }
}

function _get_url(_url, data) {
  const url = new URL(window.location.origin + _url)
  if (data) {
    Object.keys(data).forEach(key => url.searchParams.append(key, data[key]))
  }
  return url
}

function _get_headers() {
  return {
    method: 'GET',
    headers: {
      'Accept': 'application/json',
    },
  }
}

function post(data) {
  return {
    method: 'POST',
    body: JSON.stringify(data),
    headers: {
      'Accept': 'application/json',
      'Content-Type': 'application/json'
    },
  }
}

// postBody skips JSON serialization so FormData uploads keep their multipart encoding.
export function postBody(data) {
  return {
    method: 'POST',
    body: data,
    headers: {
      'Accept': 'application/json',
    },
  }
}

function put(data) {
  return {
    method: 'PUT',
    body: JSON.stringify(data),
    headers: {
      'Accept': 'application/json',
      'Content-Type': 'application/json'
    },
  }
}

function destroy(data) {
  return {
    method: 'DELETE',
    body: JSON.stringify(data),
    headers: {
      'Accept': 'application/json',
      'Content-Type': 'application/json'
    },
  }
}

// Exported so api/index.js can reuse the same catch handler.
export function error(err) {
  console.warn(err)
}

diff --git a/app/api/index.js b/app/api/index.js
new file mode 100644
index 0000000..7562db7
--- /dev/null
+++ b/app/api/index.js
@@ -0,0 +1,20 @@

import FormData from 'form-data'
import fetch from 'node-fetch'

import { crud, postBody, error } from './crud'

export const folder = crud('folder')

folder.upload = (folder_id, files) => {
  var data = new FormData()
  for (var i = 0; i < files.length; i++) {
    data.append('file', files[i])
  }
  return fetch('/folders/' + folder_id, postBody(data))
    .then(req => req.json())
    .catch(error)
}

export const file = crud('file')
export const task = crud('task')
export const job = crud('job')

diff --git a/app/client/modules/module.reducer.js b/app/client/modules/module.reducer.js
new file mode 100644
index 0000000..de2f9bc
--- /dev/null
+++ b/app/client/modules/module.reducer.js
@@ -0,0 +1,7 @@

import { combineReducers } from 'redux'

import samplernnReducer from './samplernn/samplernn.reducer'

export const moduleReducer = combineReducers({
  samplernn: samplernnReducer
})

diff --git a/app/client/modules/samplernn/samplernn.reducer.js b/app/client/modules/samplernn/samplernn.reducer.js
new file mode 100644
index 0000000..a642919
--- /dev/null
+++ b/app/client/modules/samplernn/samplernn.reducer.js
@@ -0,0 +1,32 @@

import types from '../../types'

const samplernnInitialState = {
  loading: false,
  error: null,
  datasets: [],
  results: [],
}

const samplernnReducer = (state = samplernnInitialState, action) => {
  switch(action.type) {
    case types.socket.connect:
      return {
        ...state,
      }
    case types.task.task_begin:
      return {
        ...state,
      }
    case types.task.task_finish:
      // `state.runner` is not in the initial state above; it is assumed to be
      // populated by actions outside this diff before task_finish can arrive.
      if (state.runner[action.task.processor].task.uuid !== action.task.uuid) {
        return state
      }
      return {
        ...state,
      }
    default:
      return state
  }
}

export default samplernnReducer
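For orientation (not part of the commit): a sketch of how the crud() factory above is meant to be consumed, using the names that app/api/index.js exports. The folder id and query values are illustrative; URL shapes follow _get_url() and the path construction in crud().

    import { crud } from './crud'

    const file = crud('file')
    // index() serializes its argument into the query string:
    //   file.index({ folder_id: 3, limit: 10 })  ->  GET /files/?folder_id=3&limit=10
    file.index({ folder_id: 3, limit: 10 }).then(files => console.log(files))
    // show/update/destroy append the record id to the collection path:
    //   file.update({ id: 7, processed: true })  ->  PUT /files/7
    // under() re-scopes the base path:
    //   crud('file').under('recent').index()     ->  GET /files/recent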
diff --git a/app/server/bridge.js b/app/server/bridge.js
new file mode 100644
index 0000000..6168580
--- /dev/null
+++ b/app/server/bridge.js
@@ -0,0 +1,85 @@

import { server } from './site'
import socket_io from 'socket.io'

const io = socket_io(server)
let relay_connected = false

export const client = io.of('/client')
client.on('connect', bind_client)

// The relay is either a remote socket.io server this process dials out to,
// or a local namespace the relay process connects into, depending on config.
export const relay = (() => {
  let relay;
  if (process.env.EXPRESS_CONNECTS_TO_RELAY === 'true') {
    console.log('Connecting to relay on ' + process.env.RELAY_REMOTE)
    relay = require('socket.io-client').connect(process.env.RELAY_REMOTE)
    bind_relay(relay)
  } else {
    relay = io.of('/relay')
    relay.on('connect', bind_relay)
  }
  return relay
})()

function bind_relay(socket) {
  console.log('Relay connected')
  relay_connected = true
  client.emit('system_res', { type: 'relay_connected' })

  socket.on('res', data => {
    client.emit('res', data)
  })

  socket.on('status', data => {
    client.emit('status', data)
  })

  socket.on('system_res', data => {
    client.emit('system_res', data)
  })

  socket.on('task_res', data => {
    client.emit('task_res', data)
  })

  socket.on('frame', (data) => {
    // volatile: frames may be dropped if a client can't keep up
    client.volatile.emit('frame', data)
  })

  socket.on('disconnect', () => {
    console.log('Relay disconnected')
    relay_connected = false
    client.emit('system_res', { type: 'relay_disconnected' })
  })
}

function bind_client(socket){
  console.log('Client connected')
  if (relay_connected) {
    socket.emit('system_res', { type: 'relay_connected' })
    relay.emit('system', { cmd: 'get_status' })
  } else {
    socket.emit('system_res', { type: 'relay_disconnected' })
  }

  socket.on('cmd', data => {
    relay.emit('cmd', data)
  })

  socket.on('system', data => {
    relay.emit('system', data)
  })

  socket.on('task', data => {
    relay.emit('task', data)
  })

  socket.on('disconnect', () => {
    console.log('Client disconnected')
  })
}

diff --git a/app/server/db/bookshelf.js b/app/server/db/bookshelf.js
new file mode 100644
index 0000000..27d9dbb
--- /dev/null
+++ b/app/server/db/bookshelf.js
@@ -0,0 +1,24 @@

const knex = require('knex')({
  client: 'mysql2',
  connection: {
    host     : process.env.DB_HOST,
    user     : process.env.DB_USER,
    password : process.env.DB_PASS,
    database : process.env.DB_NAME,
    charset  : 'utf8',
    // Return BLOB columns as strings instead of Buffers.
    typecast : function (field, next) {
      if (field.type == 'BLOB') {
        return field.string()
      }
      return next()
    }
  }
})

const bookshelf = require('bookshelf')(knex)

module.exports = {
  bookshelf: bookshelf,
  knex: knex,
}
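A hypothetical browser-side counterpart to bind_client(), to show which event names the bridge forwards in each direction (socket.io-client assumed; the payloads are illustrative, not from this commit):

    const socket = io('/client')                     // namespace served by bridge.js
    socket.on('system_res', msg => {                 // relay_connected / relay_disconnected
      if (msg.type === 'relay_connected') {
        socket.emit('system', { cmd: 'get_status' }) // forwarded verbatim to the relay
      }
    })
    socket.on('task_res', data => { /* task results relayed back */ })
    socket.on('frame', data => { /* volatile stream; frames may be dropped */ })
    socket.emit('cmd', { cmd: 'generate' })          // bridged to the relay's 'cmd'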
diff --git a/app/server/db/crud.js b/app/server/db/crud.js
new file mode 100644
index 0000000..3aa0127
--- /dev/null
+++ b/app/server/db/crud.js
@@ -0,0 +1,47 @@

// Low-level CRUD over a Bookshelf model. index() treats limit/offset/orderBy
// as pagination controls and every remaining key as a WHERE condition.
module.exports = function(model) {
  return {
    index: (q) => {
      return model.query( (qb) => {
        const limit = q.limit || 100
        const offset = q.offset || 0
        const orderBy = q.orderBy || 'id desc'
        delete q.limit
        delete q.offset
        delete q.orderBy
        if (Object.keys(q).length > 0) qb.where(q)
        if (orderBy) {
          const ob = orderBy.split(" ")
          const ob_field = ob[0] || 'id'
          const ob_dir = ob[1] || 'desc'
          qb.orderBy(ob_field, ob_dir)
        }
        if (limit) qb.limit(limit)
        if (offset) qb.offset(offset)
        return qb
      }).fetchAll()
    },
    show: (id) => {
      return new model({'id': id}).fetch()
    },
    show_ids: (ids) => {
      return model.query( (qb) => {
        qb.whereIn('id', ids)
        return qb
      }).fetchAll()
    },
    create: (data) => {
      return new model(data).save()
    },
    update: (id, data) => {
      return new model({'id': id}).save(data)
    },
    destroy: (id) => {
      return new model({'id': id}).destroy()
    },
  }
}

diff --git a/app/server/db/index.js b/app/server/db/index.js
new file mode 100644
index 0000000..c89afc3
--- /dev/null
+++ b/app/server/db/index.js
@@ -0,0 +1,5 @@

let db = module.exports

db.crud = require('./crud')
db.model = require('./model')
db.models = require('./models')
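The index() contract, illustrated (my reading of the code above, not an addition to the commit): every non-reserved key becomes a WHERE clause after the pagination keys are stripped. The table definition here is a minimal stand-in for the models in models.js.

    const db_crud = require('./crud')
    const { bookshelf } = require('./bookshelf')
    const File = bookshelf.Model.extend({ tableName: 'files' })
    const files = db_crud(File)

    // { folder_id: 2, limit: 5, orderBy: 'created_at asc' } becomes roughly:
    //   SELECT * FROM files WHERE folder_id = 2 ORDER BY created_at asc LIMIT 5 OFFSET 0
    files.index({ folder_id: 2, limit: 5, orderBy: 'created_at asc' })
      .then(collection => console.log(collection.toJSON()))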
diff --git a/app/server/db/loader.js b/app/server/db/loader.js
new file mode 100644
index 0000000..ad42b17
--- /dev/null
+++ b/app/server/db/loader.js
@@ -0,0 +1,101 @@

module.exports = (function(){
  function Loader (readyCallback, view){
    this.assets = {};
    this.images = [];
    this.readyCallback = readyCallback || function(){};
    this.count = 0
    this.view = view
    this.loaded = false
  }

  // Set the callback for when the loader is ready
  Loader.prototype.onReady = function(readyCallback){
    this.readyCallback = readyCallback || function(){};
  }

  // Register an asset as loading
  Loader.prototype.register = function(s){
    this.assets[s] = false;
    this.count += 1
  }

  // Signal that an asset has loaded
  Loader.prototype.ready = function(s){
    this.assets[s] = true;
    if (this.loaded) return;

    this.view && this.view.update( this.percentRemaining() )

    if (! this.isReady()) return;

    this.loaded = true;
    if (this.view) {
      this.view.finish(this.readyCallback)
    }
    else {
      this.readyCallback && this.readyCallback();
    }
  }

  // (boolean) Is the loader ready?
  Loader.prototype.isReady = function(){
    return ! Object.keys(this.assets).some( (key) => {
      return ! this.assets[key]
    })
  }

  // (float) Fraction of assets remaining
  Loader.prototype.percentRemaining = function(){
    return this.remainingAssets() / this.count
  }

  // (int) Number of assets remaining
  Loader.prototype.remainingAssets = function(){
    var n = 0;
    for (var s in this.assets) {
      if (this.assets.hasOwnProperty(s) && this.assets[s] != true) {
        n++;
      }
    }
    return n;
  }

  // Preload the images in config.images
  Loader.prototype.preloadImages = function(images){
    this.register("preload");
    for (var i = 0; i < images.length; i++) {
      this.preloadImage(images[i]);
    }
    this.ready("preload");
  }

  Loader.prototype.preloadImage = function(src, register, cb){
    if (! src || src == "none") return;
    var _this = this;
    // Allow preloadImage(src, cb) by shifting arguments.
    if (! cb && typeof register === "function") {
      cb = register
      register = null
    }
    if (register) {
      this.register(src);
    }
    var img = new Image(), loaded = false;
    img.onload = function(){
      if (loaded) return
      loaded = true
      if (cb) {
        cb(img);
      }
      if (register) {
        _this.ready(src);
      }
    }
    img.src = src;
    // Cached images may already be complete; fire the handler ourselves.
    if (img.complete) img.onload();
    _this.images.push(img);
  }

  return Loader;
})();
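A minimal sketch of the register/ready bookkeeping (asset names are illustrative); model.js below leans on exactly this pattern to wait for several asynchronous fetches:

    const Loader = require('./loader')

    const loader = new Loader(() => console.log('everything loaded'))
    loader.register('config')
    loader.register('samples')
    loader.ready('config')    // percentRemaining() is now 1/2
    loader.ready('samples')   // isReady() flips true; the callback fires exactly once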
diff --git a/app/server/db/model.js b/app/server/db/model.js
new file mode 100644
index 0000000..d84f138
--- /dev/null
+++ b/app/server/db/model.js
@@ -0,0 +1,157 @@

const Loader = require('./loader')
const db_crud = require('./crud')

// Wraps the raw CRUD layer with JSON serialization, hasOne joins (resolved in
// application code via Loader rather than SQL), and an afterCreate hook.
module.exports = function modelScope(type, db_model, _props) {

  const props = Object.assign({
    hasOne: {},
    afterCreate: () => {},
  }, _props)

  const crud = db_crud(db_model)

  const model = {
    type: type,
    db_model: db_model,
    crud: crud,

    index: (query) => {
      return new Promise( (resolve, reject) => {
        crud.index(query).then( (data) => {
          if (! props.hasOne) {
            resolve(data ? data.toJSON() : [])
          }
          else {
            let recs = data.toJSON()
            const loader = new Loader()
            loader.onReady( () => {
              resolve(recs)
            })
            // The 'hasOne' sentinel keeps the loader open until every
            // relation below has been registered.
            loader.register('hasOne')
            Object.keys(props.hasOne).forEach( (key, i) => {
              loader.register(key)
              const type = props.hasOne[key]
              const id_lookup = {}
              recs.forEach(r => {
                const id = r[key + '_id']
                id_lookup[id] = id_lookup[id] || []
                id_lookup[id].push(r)
              })
              db_crud(type).show_ids(Object.keys(id_lookup)).then( (sub_recs) => {
                const short_key = key.replace('_id', '')
                sub_recs.toJSON().forEach(rec => {
                  id_lookup[rec.id].forEach( parent_rec => parent_rec[short_key] = rec )
                })
                loader.ready(key)
              })
            })
            loader.ready('hasOne')
          }
        })
      })
    },

    show: (id) => {
      return new Promise( (resolve, reject) => {
        crud.show(id).then( (data) => {
          if (! props.hasOne) {
            resolve(data.toJSON())
          }
          else {
            let rec = data.toJSON()
            const loader = new Loader()
            loader.onReady( () => {
              resolve(rec)
            })
            loader.register('hasOne')
            Object.keys(props.hasOne).forEach( (key, i) => {
              loader.register(key)
              const type = props.hasOne[key]
              db_crud(type).show(rec[key + '_id']).then( (sub_rec) => {
                // Serialize to plain JSON, matching what index() embeds.
                rec[key] = sub_rec ? sub_rec.toJSON() : null
                loader.ready(key)
              })
            })
            loader.ready('hasOne')
          }
        })
      })
    },

    findOrCreate: (data) => {
      return new Promise( (resolve, reject) => {
        let query = Object.assign({}, data)
        query.limit = 1
        crud.index(query).then( (recs) => {
          if (recs && recs.length) {
            // NB: resolves a Bookshelf model here, but plain JSON on the create path.
            return resolve(recs.at(0))
          }
          model.create(data).then( (rec) => {
            resolve(rec)
          })
        })
      })
    },

    create: (data) => {
      return new Promise( (resolve, reject) => {
        crud.create( model.sanitize(data) ).then( (data) => {
          resolve(data.toJSON())
          props.afterCreate && props.afterCreate(data)
        }).catch( (e) => {
          console.error('error creating', e)
          reject(e)
        })
      })
    },

    update: (id, data) => {
      return new Promise( (resolve, reject) => {
        crud.update(id, model.sanitize(data)).then( (data) => {
          resolve(data.toJSON())
        }).catch( (e) => {
          console.error('error updating', e)
          reject(e)
        })
      })
    },

    destroy: (id) => {
      return new Promise( (resolve, reject) => {
        crud.destroy(id).then( (data) => {
          resolve(data.toJSON())
        })
      })
    },

    // Whitelist writable fields; hasOne keys are read-only joins.
    sanitize: (data) => {
      var valid = {}
      props.fields.forEach(key => {
        if (props.hasOne[key]) {
          return
        }
        if (key in data) {
          valid[key] = data[key]
        }
      })
      return valid
    },

  }

  return model
}

diff --git a/app/server/db/models.js b/app/server/db/models.js
new file mode 100644
index 0000000..2108148
--- /dev/null
+++ b/app/server/db/models.js
@@ -0,0 +1,53 @@

let fs = require('fs')
let model = require('./model')
let bookshelf = require('./bookshelf').bookshelf
// bridge is assumed to expose processFiles()/processTasks(); those hooks are
// not part of this diff.
import bridge from '../bridge'

let Folder = bookshelf.Model.extend({
  tableName: 'folders',
  hasTimestamps: true,
})
let File = bookshelf.Model.extend({
  tableName: 'files',
  hasTimestamps: true,
})
let Job = bookshelf.Model.extend({
  tableName: 'jobs',
  hasTimestamps: true,
})
let Task = bookshelf.Model.extend({
  tableName: 'tasks',
  hasTimestamps: true,
})

module.exports = {
  folder: model('folder', Folder, {
    fields: "name username description".split(" "),
    afterCreate: (folder) => {
      fs.mkdir('data/' + folder.get('id') + '/', function(){
        console.log('created folder', folder.get('id'), folder.get('name'))
      })
    }
  }),
  file: model('file', File, {
    fields: "folder_id username name mime type duration analysis size processed generated".split(" "),
    afterCreate: (file) => {
      bridge.processFiles()
    }
  }),
  job: model('job', Job, {
    fields: "name username completed tool".split(" "),
  }),
  task: model('task', Task, {
    fields: "job_id username completed processing tool content_file_id style_file_id output_file_id alpha iterations stdout stderr".split(" "),
    afterCreate: (task) => {
      bridge.processTasks()
    },
    hasOne: {
      content_file: File,
      style_file: File,
      output_file: File,
    }
  }),
}
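What the hasOne wiring buys in practice, sketched with the task model defined above (the job_id is illustrative): the *_id columns are resolved into embedded records in application code before the promise resolves.

    const models = require('./models')

    models.task.index({ job_id: 1 }).then(tasks => {
      // each task arrives with its _id columns joined in-memory:
      //   task.content_file_id -> task.content_file = { id, name, mime, ... }
      tasks.forEach(t => console.log(t.content_file && t.content_file.name))
    })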
diff --git a/app/server/site.js b/app/server/site.js
new file mode 100644
index 0000000..44c4508
--- /dev/null
+++ b/app/server/site.js
@@ -0,0 +1,94 @@

require('dotenv').config()
const express = require('express')
const http = require('http')
const path = require('path')
const api = require('./util/api')
const multer = require('multer')()   // no storage config: files arrive in memory (file.buffer)
const upload = require('./util/upload')
const bodyParser = require('body-parser')

export const app = express()
export const server = http.createServer(app)
server.listen(process.env.EXPRESS_PORT, () => {
  console.log('Cortex remote listening on http://localhost:' + server.address().port)
})

app.disable('x-powered-by')
app.use(bodyParser.json())
app.use(bodyParser.urlencoded({ extended: false }))
app.use(express.query())
app.use(express.static(path.join(__dirname, '../public'), { extensions: ['html'] }))

upload.init()   // not defined in util/upload.js as committed; assumed to be provided elsewhere

const api_folders = api(app, 'folder')
const api_files = api(app, 'file')
const api_jobs = api(app, 'job')
const api_tasks = api(app, 'task')

app.post('/folders/:id',
  multer.array('file'),
  function (req, res, next){
    if (! req.files || ! req.files.length) {
      res.json({ error: "no files" })
      return
    }
    // res.thread, req.user and util are assumed to come from middleware and
    // helpers that are not part of this diff.
    var thread_id = res.thread.get('id')
    var dirname = process.env.S3_PATH + '/data/' + thread_id + '/'
    var promises = req.files.map((file) => {
      return new Promise( (resolve, reject) => {
        upload.put({
          file: file,
          preserveFilename: true,
          dirname: dirname,
          unacceptable: function(err){
            reject(err)
          },
          success: function(url){
            console.log("file >", url)
            var data = {
              folder_id: req.params.id,
              name: file.originalname,
              size: file.size,
              generated: false,
              processed: false,

              thread: res.thread.get('id'),
              username: req.user.get('username'),
              filename: file.originalname,
              date: util.now(),
              privacy: false,
              storage: process.env.S3_BUCKET,
            }

            api_files.create(data).then( (file) => {
              resolve(file)
            }).catch( (err) => {
              console.warn(err)
              res.sendStatus(500)
            })
          }
        })
      })
    })
    Promise.all(promises).then(files => {
      res.json({ files })
      next()
    }).catch(err => {
      console.log(err)
    })
  })

app.get('/:module/:mode/', serve_index)
app.get('/', serve_index)

function serve_index(req, res) {
  res.sendFile(path.join(__dirname, '../../public', 'index.html'))
}
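A hypothetical client-side call matching the multipart route above (the field name 'file' is fixed by multer.array('file'); the folder id is illustrative):

    const data = new FormData()
    for (const f of fileInput.files) data.append('file', f)

    fetch('/folders/7', { method: 'POST', body: data })
      .then(res => res.json())
      .then(({ files }) => console.log('stored', files))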
diff --git a/app/server/util/api.js b/app/server/util/api.js
new file mode 100644
index 0000000..2b2a9e4
--- /dev/null
+++ b/app/server/util/api.js
@@ -0,0 +1,100 @@

const db = require('../db')

// Mounts the five standard REST routes for a type and returns its model.
export function api (app, type) {
  const type_s = '/' + type + 's/'
  const type_id = type_s + ':id'

  const model = db.models[type]

  // index
  app.get(type_s, (req, res) => {
    console.log('index', type)
    model.index(req.query).then( data => res.json(data) )
  })

  // show
  app.get(type_id, (req, res) => {
    console.log('show', type, req.params.id)
    model.show(req.params.id).then( (data) => {
      res.json(data)
    })
  })

  // create
  app.post(type_s, (req, res) => {
    console.log('create', type)
    model.create(req.body).then( (data) => {
      res.json(data)
    })
  })

  // update
  app.put(type_id, (req, res) => {
    console.log('update', type, req.params.id)
    model.update(req.body.id, req.body).then( (data) => {
      res.json(data)
    })
  })

  // destroy
  app.delete(type_id, (req, res) => {
    console.log('destroy', type, req.params.id)
    model.destroy(req.params.id).then( (data) => {
      res.json(data)
    })
  })

  return model
}

const upload = require('./upload')

export function uploadFile(req, res, next) {
  if (! req.file) {
    res.json({ error: "no files" })
    return
  }
  var dirname = '/cortex/' + req.params.tool + '/'
  upload.put({
    file: req.file,
    dirname: dirname,
    preserveFilename: true,
    unacceptable: function(err){
      res.status(500).json({ error: 'Problem uploading file.' })
    },
    success: function(url){
      res.urls = url
      next()
    }
  })
}

export function uploadFiles(req, res, next){
  if (! req.files || ! req.files.length) {
    res.json({ error: "no files" })
    return
  }
  var dirname = '/cortex/' + req.params.tool + '/'
  var promises = req.files.map((file) => {
    return new Promise( (resolve, reject) => {
      upload.put({
        file: file,
        dirname: dirname,
        preserveFilename: true,
        unacceptable: function(err){
          reject(err)
        },
        success: function(url){
          console.log("file >", url)
          resolve(url)
        }
      })
    })
  })
  Promise.all(promises).then(values => {
    res.urls = values
    next()
  }).catch(err => {
    console.log(err)
  })
}
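Given an Express app, api() derives its paths by pluralizing the type; for api(app, 'task') the mounted surface is (a reading of the code above, not an addition):

    const { api } = require('./api')

    const task_model = api(app, 'task')
    //   GET    /tasks/      -> model.index(req.query)   (filter + paginate)
    //   GET    /tasks/:id   -> model.show(id)
    //   POST   /tasks/      -> model.create(req.body)
    //   PUT    /tasks/:id   -> model.update(req.body.id, req.body)
    //   DELETE /tasks/:id   -> model.destroy(id)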
diff --git a/app/server/util/upload.js b/app/server/util/upload.js
new file mode 100644
index 0000000..9e15748
--- /dev/null
+++ b/app/server/util/upload.js
@@ -0,0 +1,80 @@

var knox = require('knox')
var uuid = require('uuid/v1')

var s3 = knox.createClient({
  key: process.env.S3_KEY,
  secret: process.env.S3_SECRET,
  bucket: process.env.S3_BUCKET,
})

// Default extension map; callers currently pass their own opt.types instead.
var acceptableuploadTypes = {
  'image/gif': 'gif',
  'image/jpeg': 'jpg',
  'image/jpg': 'jpg',
  'image/png': 'png',
}

module.exports = {}

module.exports.client = function(){
  return s3
}

module.exports.put = function (opt) {
  var filename
  var err

  var file = opt.file

  var types = opt.types
  var extension = types && types[file.mimetype]

  if (opt.filename) {
    filename = opt.filename
  } else if (opt.preserveFilename) {
    filename = file.originalname
  } else {
    filename = uuid() + "." + extension;
  }

  var remote_path = opt.dirname + filename

  if (types && ! extension) {
    err = "Unacceptable filetype."
  }
  else if (opt.maxSize && file.size > opt.maxSize) {
    err = "File too large. Uploads can be a maximum of " + opt.maxSize + " bytes."
  }

  if (err) {
    console.error(">>>", err)
    opt.unacceptable && opt.unacceptable(err)
    return
  }

  opt.acceptable && opt.acceptable()

  console.log("upload >", remote_path)
  s3.putBuffer(file.buffer, remote_path, {
    'Content-Length': file.size,
    'Content-Type': file.mimetype,
    'x-amz-acl': 'public-read'
  }, function(err, s3res) {
    if (err || s3res.statusCode !== 200) {
      console.error(err);
      if (s3res && s3res.resume) {
        s3res.resume()
      }
      return;
    }

    var file_url = s3res.url || s3res.req.url

    opt.success && opt.success(file_url)
  }).on('error', function(err, s3res){
    console.error(err)
    s3res && s3res.resume && s3res.resume()
  })
}
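A usage sketch of put() covering the options it checks (types, maxSize, and both callbacks are optional; the dirname and limits here are illustrative):

    const upload = require('./upload')

    upload.put({
      file: req.file,                        // a multer memory-storage file
      dirname: '/cortex/samplernn/',
      types: { 'image/png': 'png' },         // restrict mimetypes; omit to accept any
      maxSize: 10 * 1024 * 1024,
      unacceptable: err => res.status(400).json({ error: err }),
      success: url => res.json({ url }),
    })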
