path: root/app/client/dataset/dataset.loader.js
blob: ad95cd7a24e501d46826740625600346a5e5d122
import actions from '../actions'

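// per-module cache of the synthetic "unsorted" folder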
const unsortedFolders = {}

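// Return the module's "unsorted" folder, creating it on first use or recreating it when instantiate is true.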
export const unsortedFolder = (module, instantiate=false) => {
  if (!unsortedFolders[module] || instantiate) {
    const folder = {
      id: 0,
      module,
      name: 'unsorted',
      files: [],
      datasets: [],
    }
    unsortedFolders[module] = folder
  }
  return unsortedFolders[module]
}

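// Create an empty dataset, register it in datasetLookup, and attach its name to the given folder
// (defaulting to the module's unsorted folder).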
export const emptyDataset = (module, datasetLookup, name, folder) => {
  const dataset = {
    name,
    input: [],
    checkpoints: [],
    output: [],
  }
  datasetLookup[name] = dataset
  folder = folder || unsortedFolder(module)
  folder.datasets.push(name)
  return dataset
}

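// Fetch (or create) a dataset by name and keep its date at the latest valid timestamp seen.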
export const getDataset = (module, datasetLookup, name, folder, date) => {
  const dataset = datasetLookup[name] || emptyDataset(module, datasetLookup, name, folder)
  if (date) {
    // keep the most recent valid timestamp seen for this dataset
    const timestamp = +new Date(date)
    dataset.date = (dataset.date && !isNaN(dataset.date)) ? Math.max(timestamp, dataset.date) : timestamp
  }
  return dataset
}

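// Load a module's folders, files, and tasks in parallel, then assemble the
// folder/file/dataset lookup tables returned to the caller.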
export const load = module => {
  return Promise.all([
    actions.folder.index({ module }),
    actions.file.index({ module }),
    actions.task.index({ module }),
  ]).then(res => {
    const [folders, files, tasks] = res

    let datasetLookup = {}
    let folderLookup = {}
    let fileLookup = {}

    // index every folder by id, seeding the lookup with the module's "unsorted" folder
    folderLookup = folders.reduce((folderLookup, folder) => {
      folderLookup[folder.id] = {
        id: folder.id,
        name: folder.name,
        folder,
        files: [],
        datasets: [],
      }
      return folderLookup
    }, {
      unsorted: unsortedFolder(module, true)
    })

    // split the files into generated output files and ungenerated source files
    const generatedFiles = files.filter(file => file.generated)
    const ungeneratedFiles = files.filter(file => !file.generated)

    // build the initial dataset lookup from the ungenerated (source) files
    ungeneratedFiles.forEach(file => {
      fileLookup[file.id] = file
      if (!file.name) {
        file.name = (file.opt || {}).token || file.url
      }
      // the dataset name is the file name without its extension
      const name = (file.name || 'unsorted').split('.')[0]
      const folder = folderLookup[file.folder_id] || unsortedFolder(module)
      const dataset = getDataset(module, datasetLookup, name, folder, file.date || file.created_at)
      // use includes rather than match: file names may contain regex metacharacters
      if (file.url.includes(file.name)) file.persisted = true
      dataset.input.push(file.id)
      folder.files.push(file.id)
    })

    // walk the generated files, adding datasets for any whose source files were deleted
    generatedFiles.forEach(file => {
      fileLookup[file.id] = file
      // generated files are named "<dataset>-<epoch>.<extension>"
      const [datasetName, epoch] = file.name.split('.')[0].split('-')
      const folder = folderLookup[file.folder_id] || unsortedFolder(module)
      const dataset = getDataset(module, datasetLookup, datasetName, folder, file.date || file.created_at)
      dataset.output.push(file.id)
      folder.files.push(file.id)
      file.epoch = file.epoch || epoch
    })

    return {
      folderLookup,
      fileLookup,
      datasetLookup,
      folders,
      files,
      unsortedFolder: folderLookup.unsorted,
    }
  }).catch(e => {
    console.error(e)
  })
}
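
// Usage sketch (the module name below is illustrative, not taken from this repo):
//   load('images').then(data => {
//     if (!data) return // load() catches errors and resolves with undefined
//     const { datasetLookup, folderLookup, fileLookup } = data
//     // ...render folders and datasets from the lookups
//   })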