// S3 upload helper built on knox.
// Call init() once (reads S3_KEY / S3_SECRET / S3_BUCKET from the
// environment) before calling put().
var knox = require('knox')
var uuid = require('uuid/v1')

// Shared knox client; created by init(), exposed via client().
var s3

// Default map of acceptable image MIME types -> file extensions.
// Exported (fix: was previously declared but never used or exported)
// so callers can pass it as opt.types.
var acceptableUploadTypes = {
  'image/gif': 'gif',
  'image/jpeg': 'jpg',
  'image/jpg': 'jpg',
  'image/png': 'png',
}

module.exports = {}

module.exports.acceptableUploadTypes = acceptableUploadTypes

// Create the shared knox S3 client from environment configuration.
module.exports.init = function () {
  s3 = knox.createClient({
    key: process.env.S3_KEY,
    secret: process.env.S3_SECRET,
    bucket: process.env.S3_BUCKET,
  })
}

// Return the shared knox client (undefined until init() has run).
module.exports.client = function () {
  return s3
}

// Upload a file buffer to S3 under opt.dirname.
//
// opt.file             - uploaded file; assumes a multer-style object with
//                        .buffer, .size, .mimetype, .originalname — TODO confirm
// opt.types            - optional map of mimetype -> extension; when present,
//                        files whose mimetype is not a key are rejected
// opt.filename         - optional explicit remote filename
// opt.preserveFilename - use file.originalname as the remote filename
// opt.maxSize          - optional maximum size in bytes
// opt.dirname          - remote key prefix (must include trailing slash itself;
//                        it is concatenated directly with the filename)
// opt.acceptable       - optional callback invoked once validation passes
// opt.unacceptable     - optional callback invoked with an error message string
// opt.success          - optional callback invoked with the public file URL
//
// NOTE(review): on an S3 transport/status failure neither success nor
// unacceptable fires — callers get no completion signal. Preserved as-is.
module.exports.put = function (opt) {
  var file = opt.file
  var types = opt.types
  var extension = types && types[file.mimetype]
  var filename
  var err

  if (opt.filename) {
    filename = opt.filename
  } else if (opt.preserveFilename) {
    filename = file.originalname
  } else if (extension) {
    filename = uuid() + "." + extension;
  } else {
    // Fix: previously produced "<uuid>.undefined" when no type map was
    // supplied; generate an extensionless name instead.
    filename = uuid()
  }
  var remote_path = opt.dirname + filename

  // Validate before touching S3; a set err short-circuits the upload.
  if (types && ! extension) {
    err = "Unacceptable filetype."
  } else if (opt.maxSize && file.size > opt.maxSize) {
    err = "File too large. Uploads can be a maximum of " + opt.maxSize + " bytes."
  }

  if (err) {
    console.error(">>>", err)
    opt.unacceptable && opt.unacceptable(err)
    return
  }
  // err is always undefined here, so call with no argument (fix: the old
  // code passed the guaranteed-undefined err, which was misleading).
  opt.acceptable && opt.acceptable()

  console.log("upload >", remote_path)
  s3.putBuffer(file.buffer, remote_path, {
    'Content-Length': file.size,
    'Content-Type': file.mimetype,
    'x-amz-acl': 'public-read'
  }, function(err, s3res) {
    if (err || s3res.statusCode !== 200) {
      // Fix: on a non-200 status err is undefined — log the status code
      // instead of logging undefined and discarding the failure signal.
      console.error(err || ("S3 upload failed with status " + s3res.statusCode))
      // Drain the response so the underlying socket is released.
      if (s3res && s3res.resume) { s3res.resume() }
      return;
    }
    var file_url = s3res.url || s3res.req.url
    opt.success && opt.success(file_url)
  }).on('error', function(err, s3res){
    console.error(err)
    s3res && s3res.resume && s3res.resume()
  })
}