path: root/bucky/util/federate.js
blob: 5181d852b87f2fd48a7e4f66aff65d65a8ae9f50
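// Federation helpers: export threads, keywords, comments and files from this
// instance and import them into another bucky instance via the /raw/import routes.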
const fetch = require('node-fetch')
const fs = require('fs')
const db = require('../db')
const upload = require('../util/upload')
const mime = require('mime-types')

module.exports = {

  route: (app) => {
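    // Import endpoints: a remote instance PUTs raw records here.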
    app.put('/raw/import/thread/',  importRaw('thread', 'Thread'),  (req, res) => res.send({ status: 'ok', el: res.el }))
    app.put('/raw/import/keyword/', importRaw('keyword', 'Keyword'), (req, res) => res.send({ status: 'ok', el: res.el }))
    app.put('/raw/import/file/',    importRaw('file', 'File'),    (req, res) => res.send({ status: 'ok', el: res.el }))
    app.put('/raw/import/comment/', importRaw('comment', 'Comment'), (req, res) => res.send({ status: 'ok', el: res.el }))

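    // Export endpoints: hit locally to push a thread or keyword (and its
    // dependent records) to the remote instance targeted in send().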
    app.get('/raw/export/thread/:id',  exportThread,  (req, res) => res.send({ status: 'ok' }))
    app.get('/raw/export/keyword/:keyword', exportKeyword, (req, res) => res.send({ status: 'ok' }))

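    // importRaw: middleware factory. Drops the incoming record's id and saves
    // the payload as a new <model> row, stashing the result on res.el so the
    // route handler can echo it back to the exporting instance.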
    function importRaw (type, model) {
      return (req, res, next) => {
        console.log('importing', type, req.body.id)
        delete req.body.id
        db[model].forge(req.body).save().then((el) => {
          res.el = el;
          next()
        }).catch(e => {
          console.error(e)
          next()
        })
      }
    }
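    // exportKeyword: push the keyword record to the remote instance, then
    // export each of its threads one at a time.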
    function exportKeyword (req, res, next) {
      console.log('export keyword', req.params.keyword)
      return db.getKeyword(
        req.params.keyword
      ).then(keyword => send("keyword", keyword)
      ).then(() => db.getThreadsForKeyword(req.params.keyword)
      ).then(threads => threads.reduce((promise, thread) => (
          // export threads sequentially so each finishes before the next starts
          promise.then(() => exportThread({ params: { id: thread.get('id') } }, res, function(){}))
        ), Promise.resolve())
      ).then(() => next()
      ).catch(e => {
        console.error(e)
        next()
      })
    }
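    // exportThread: push a thread to the remote instance, then its comments
    // and files, rewriting their thread foreign key to the id the remote
    // import assigned and copying file contents up to S3.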
    function exportThread (req, res, next) {
      var thread_id
      return db.getThread(
        req.params.id
      ).then(thread => send("thread", thread)
      ).then(json => {
        // capture the id the remote import assigned so child records can point at it
        thread_id = json.el.id
        console.log('got thread id', thread_id)
        return db.getCommentsForThread(req.params.id)
      }).then(comments => comments.reduce((promise, comment) => (
          promise.then(() => {
            comment.set('thread', thread_id)
            return send("comment", comment)
          })
        ), Promise.resolve())
      ).then(() => db.getFilesForThread(req.params.id)
      ).then(files => files.reduce((promise, file) => (
          promise.then(() => {
            copyFileToS3(file, thread_id)
            file.set('thread', thread_id)
            file.set('storage', process.env.S3_BUCKET)
            return send("file", file)
          })
        ), Promise.resolve())
      ).then(() => next()
      ).catch(e => {
        console.error(e)
        next()
      })
    }
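    // send: PUT a serialized model to the remote /raw/import endpoint
    // (the remote host is hardcoded below) and resolve with the parsed JSON response.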
    function send(type, data){
      console.log('sending', type, data.get('id'))
      var json = data.toJSON()
      return fetch("https://bucky.asdf.us/raw/import/" + type, {
        method: 'PUT',
        body: JSON.stringify(json),
        headers: {
          'Content-Type': 'application/json',
          'Accept': 'application/json',
        },
      }).then(res => res.json())
      .then(json => {
        console.log(json)
        return json
      })
    }
    function copyFileToS3(file, thread_id){
      // since for now we are essentially backing up local files,
      // upload them directly to s3
      const src_fn = file.get('thread') + '/' + file.get('filename')
      const dst_fn = thread_id + '/' + file.get('filename')
      fs.readFile('/Users/user/projects/bucky3/public/data/' + src_fn, (err, buffer) => {
        if (err) return console.log(err)
        const remote_path = process.env.S3_PATH + '/data/' + dst_fn
        console.log(mime.lookup(file.get('filename')))
        upload.client().putBuffer(buffer, remote_path, {
          'Content-Length': buffer.length,
          'Content-Type': mime.lookup(file.get('filename')),
          'x-amz-acl': 'public-read'
        }, function(err, s3res) {
          if (err || s3res.statusCode !== 200) {
            // report the s3 status when there is no error object to log
            console.error(err || 's3 upload failed with status ' + s3res.statusCode);
            if (s3res && s3res.resume) {
              s3res.resume()
            }
            return;
          }

          var file_url = s3res.url || s3res.req.url

          console.log(file_url)
        }).on('error', function(err, s3res){
          console.error(err)
          s3res && s3res.resume && s3res.resume()
        })
      })
    }
  }
}