| field | value | date |
|---|---|---|
| author | Jules Laplace <julescarbon@gmail.com> | 2019-08-05 02:06:29 +0200 |
| committer | Jules Laplace <julescarbon@gmail.com> | 2019-08-05 02:06:29 +0200 |
| commit | 626c348af622b9bb66000d1a49dbe007131649ef (patch) | |
| tree | 59127e1e69a209c1f0823050c17979cd5253d312 /client | |
sonifications
Diffstat (limited to 'client')

| mode | file | lines |
|---|---|---|
| -rw-r--r-- | client/data.js | 37 |
| -rw-r--r-- | client/index.js | 204 |
| -rw-r--r-- | client/lib/color.js | 31 |
| -rw-r--r-- | client/lib/intonation.js | 162 |
| -rw-r--r-- | client/lib/kalimba.js | 50 |
| -rw-r--r-- | client/lib/keys.js | 39 |
| -rw-r--r-- | client/lib/midi.js | 197 |
| -rw-r--r-- | client/lib/scales.js | 299 |
| -rw-r--r-- | client/lib/startAudioContext.js | 181 |
| -rw-r--r-- | client/lib/ui.js | 50 |
| -rw-r--r-- | client/lib/util.js | 139 |
| -rw-r--r-- | client/old_index.js | 381 |
| -rw-r--r-- | client/player.js | 165 |
13 files changed, 1935 insertions, 0 deletions
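Read together, the new client modules form a small sonification pipeline: `data.js` fetches and parses the CSVs, `util.js` normalizes values, `index.js` steps through rows with a play function, and `midi.js`/`kalimba.js` turn each value into a pitch on the scale chosen in `scales.js`. The sketch below is only an illustration of that value-to-pitch mapping, not the project's actual entry point; the sample `row`, the `multiply` constant, and the inline `norm` helper stand in for the real CSV data, the `#multiply` dial, and `client/lib/util.js`.

```js
// Illustrative sketch of the value -> pitch mapping used by play_sequence().
// Assumes it is run from the client/ directory; the data row is invented.
const Intonation = require('./lib/intonation')

const norm = (n, min, max) => (n - min) / (max - min) // same as lib/util.js

const row = [120, 250, 900, 400]                      // stand-in for one CSV row
const min = Math.min(...row)
const max = Math.max(...row)

// Default harmonic scale from lib/scales.js, rooted at 466.164 Hz.
const scale = new Intonation({ intervals: '1/1 9/8 5/4 4/3 3/2 5/3 15/8 2/1' })
const multiply = 19                                   // default of the #multiply dial

row.forEach(n => {
  const index = Math.round(norm(n, min, max) * multiply)
  console.log(n, '->', scale.index(index).toFixed(1) + ' Hz')
})
```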
diff --git a/client/data.js b/client/data.js new file mode 100644 index 0000000..855da4a --- /dev/null +++ b/client/data.js @@ -0,0 +1,37 @@ +const files = [ + // "gun_violence", + "mass_shootings_from_columbine", + "firearms_manufactured", + "ar_15_2016_18", +] +const parse = require('csv-parse') + +const dataPromises = files.map(name => { + return fetch('./data/' + name + '.csv').then(rows => { + return rows.text() + }).then(text => { + return new Promise((resolve, reject) => { + parse(text, {}, (_, lines) => resolve(lines)) + }) + }).then(lines => { + // console.log(name, lines) + const h = lines.shift() + return { + name, + h, + lines: lines.filter(s => !!s) + } + }) +}) +const allPromises = Promise.all(dataPromises).then(data => { + return data.reduce((a,b) => { + // console.log(b) + a[b.name] = b + return a + }, {}) +}) +const load = () => { + return allPromises +} + +export { load } diff --git a/client/index.js b/client/index.js new file mode 100644 index 0000000..83b5214 --- /dev/null +++ b/client/index.js @@ -0,0 +1,204 @@ +import * as util from './lib/util' +import * as data from './data' +import * as player from './player' + +/* initialization */ + +const mass_fields = [ + "date", "timestamp", + "fatalities", "injured", "total_victims", + "age", "case", "weapon_type", "weapon_details" +].reduce((a,b,i) => { + a[b] = i + return a +}, {}) + +const gv_fields = [ + "incident_id", "date", + "state", "city_or_county", "address", + "n_killed", "n_injured", + "incident_url", "source_url", "incident_url_fields_missing", + "congressional_district", + "gun_stolen", "gun_type", "incident_characteristics", + "latitude", "location_description", "longitude", + "n_guns_involved", + "notes", + "participant_age", "participant_age_group", "participant_gender", "participant_name", "participant_relationship", "participant_status", "participant_type", + "sources", + "state_house_district", "state_senate_district" +].reduce((a,b,i) => { + a[b] = i + return a +}, {}) + +const year_days_by_month = [ + 31, 28, 31, 30, + 31, 30, 31, 31, + 30, 31, 30, 31, + 0 +].reduce((a, b, i) => { + if (i === 0) { + return [b] + } + return a.concat(a[i-1] + b) +}, []) + +let i = 0, max_i = 0, mass_i = 0 +let datasets = {}, dataset = {}, bounds = {}, diff = [] +let play_fn = play_sequence +data.load().then(lists => { + console.log(lists) + + const ar15 = lists.ar_15_2016_18 + datasets['AR-15 2016-18'] = {} + datasets['AR-15 2016-18'].name = 'AR-15 (2016-18)' + datasets['AR-15 2016-18'].pedal = true + datasets['AR-15 2016-18'].play_fn = play_mass_shootings + const ar_lines = ar15.lines.map(l => { + if (l[gv_fields.incident_characteristics].match('Shots Fired - No Injuries')) { + return null + } + if (l[gv_fields.n_killed] + l[gv_fields.n_injured] < 4) return null + const [y, m, d] = l[gv_fields.date].split('-') + if (parseInt(y) > 2017) return null + const yy = (parseInt(y) - 2016) * 365 + const mm = year_days_by_month[parseInt(m)] + const dd = Math.floor(parseInt(d)) + 14 + const date = Math.floor((yy + mm + dd) / 7) + // console.log(date, y, m, d) + let total = l[gv_fields.n_killed] + l[gv_fields.n_injured] + if (l[gv_fields.n_killed] === 0) { + total = - l[gv_fields.n_injured] + } + return [ + date, + Math.log(Math.log(total + 10) + 1), + "** !!, $$, {} killed, [] injured".replace('**', l[gv_fields.date]).replace('!!', l[gv_fields.city_or_county]).replace('$$', l[gv_fields.state]).replace('{}', l[gv_fields.n_killed]).replace('[]', l[gv_fields.n_injured]), + l[gv_fields.n_killed], + l[gv_fields.n_injured], + ] + 
}).filter(n => !!n) + datasets['AR-15 2016-18'].dates = ar_lines.map(a => a[0]) + datasets['AR-15 2016-18'].dates.push(ar_lines.length) + datasets['AR-15 2016-18'].lines = [ar_lines.map(a => a[1])] + datasets['AR-15 2016-18'].labels = ar_lines.map(a => a[2]) + + const fm = lists.firearms_manufactured + datasets['Firearms Manufactured'] = {} + datasets['Firearms Manufactured'].name = 'Firearms Manufactured' + datasets['Firearms Manufactured'].play_fn = play_sequence + datasets['Firearms Manufactured'].h = fm.h.slice(1, 5) + datasets['Firearms Manufactured'].labels = fm.lines.map(l => l.slice(0, 1)) + datasets['Firearms Manufactured'].lines = fm.lines.map(l => l.slice(1, 5)) + + datasets["Mass Shootings"] = lists.mass_shootings_from_columbine + datasets["Mass Shootings"].name = "Mass Shootings" + datasets["Mass Shootings"].pedal = true + datasets["Mass Shootings"].isMass = true + datasets["Mass Shootings"].play_fn = play_mass_shootings + const lines = datasets["Mass Shootings"].lines.reverse() + const [min_y, ...rest_a] = lines[0][mass_fields.date].split('/') + const [max_y, ...rest_b] = lines[lines.length-1][mass_fields.date].split('/') + + datasets["Mass Shootings"].dates = lines.map(row => { + const [y, m, d] = row[mass_fields.date].split('/') + return (parseInt(y) - parseInt(min_y)) * 12 + parseInt(m) + }) + datasets["Mass Shootings"].max_i = (parseInt(max_y) - parseInt(min_y)) * 12 + parseInt(12) + // console.log('max i', max_i) + datasets["Mass Shootings"].data = lines + datasets["Mass Shootings"].lines = [lines.map(row => row[mass_fields.total_victims])] + ready() +}) + +/* play function for mass shooting data w/ custom timing */ + +function play_mass_shootings(i, bounds, diff, note_time, channel="all", exporting) { + const { min, max } = bounds + const total = dataset.dates.length + let pedal_note + let notes = [], midi_notes = [] + let cases = [] + let timings + let week = Math.floor((i)/4) % 4 + let year = Math.floor((i - (4*4*3)) / 48) // + 2 + console.log(year) + let yy = -year + if (year > 0) year += 1 + let this_one = 0 + // console.log(i, mass_i, dataset.dates[mass_i], channel, exporting) + while (i >= dataset.dates[mass_i] && mass_i < total) { + // console.log(i, dataset.dates[mass_i]) + notes.push(dataset.lines[0][mass_i]) + if (dataset.isMass) { + cases.push(dataset.data[mass_i][mass_fields.date] + ' ' + dataset.data[mass_i][mass_fields.case] + + ", " + dataset.data[mass_i][mass_fields.fatalities] + ' dead, ' + dataset.data[mass_i][mass_fields.injured] + ' injured') + } else { + cases.push(dataset.labels[mass_i]) + // console.log(dataset.labels[mass_i]) + } + // console.log('push case', dataset.data[mass_i][mass_fields.date] + ' ' + dataset.data[mass_i][mass_fields.case]) + mass_i += 1 + this_one += 1 + if (this_one >= 4) break + } + + if (cases.length) { + document.querySelector('#cases').innerHTML = cases.join('<br>') + } + + if (total <= mass_i) { + mass_i = 0 + i = 0 + } else { + i += 1 + } + + return [i, [], [], pedal_note] +} + +/* play the next note in sequence */ + +function play_sequence(i, bounds, diff, note_time, channel="all", exporting) { + const { rows, min, max } = bounds + const count = rows.length * rows[0].length + if (i >= count) i = 0 + const y = Math.floor(i / rows[0].length) + const x = i % rows[0].length + // if (!x) console.log(y) + const n = rows[y][x] + i += 1 + if (i >= count) i = 0 + const midi_note = play_note( norm(n, min, max) * nx.multiply.value, note_time, channel, exporting) + return [i, [midi_note], [128]] +} + +/* play next note according 
to sonification */ + +function play_next(){ + if (paused) return + let note_time = 120000 / Tone.Transport.bpm.value * note_values[nx.timing.active][0] + clearTimeout(playTimeout) + playTimeout = setTimeout(play_next, note_time) + let [new_i, notes, timings] = play_fn(i, bounds, diff, note_time) + if (dataset.labels) { + // const j = Math.floor(i / bounds.rows[0].length) + // document.querySelector('#cases').innerHTML = dataset.labels[j] + } + + i = new_i + if (recording) { + let timing = note_values[nx.timing.active][2] + if (timing.length) timing = timing[i % timing.length] + recorder.addEvent(new MidiWriter.NoteEvent({ pitch: notes, duration: 't' + timing })) + } +} + +/* build and bind the UI */ + +function ready() { + document.querySelector('.loading').classList.remove('loading') +} + + + diff --git a/client/lib/color.js b/client/lib/color.js new file mode 100644 index 0000000..bd5b7ce --- /dev/null +++ b/client/lib/color.js @@ -0,0 +1,31 @@ + +const palettes = [ + [[0.5, 0.5, 0.5], [0.5, 0.5, 0.5], [1.0, 1.0, 1.0], [0.00, 0.33, 0.67]], + [[0.5, 0.5, 0.5], [0.5, 0.5, 0.5], [1.0, 1.0, 1.0], [0.00, 0.10, 0.20]], + [[0.5, 0.5, 0.5], [0.5, 0.5, 0.5], [1.0, 1.0, 1.0], [0.30, 0.20, 0.20]], + [[0.5, 0.5, 0.5], [0.5, 0.5, 0.5], [1.0, 1.0, 0.5], [0.80, 0.90, 0.30]], + [[0.5, 0.5, 0.5], [0.5, 0.5, 0.5], [1.0, 0.7, 0.4], [0.00, 0.15, 0.20]], + [[0.5, 0.5, 0.5], [0.5, 0.5, 0.5], [2.0, 1.0, 0.0], [0.50, 0.20, 0.25]], + [[0.8, 0.5, 0.4], [0.2, 0.4, 0.2], [2.0, 1.0, 1.0], [0.00, 0.25, 0.25]], +] + +let palette = palettes[0] + +function channel (t, a, b, c, d, add, mul) { + return a + b * Math.cos(2 * Math.PI * (c * t + d)) * mul + add +} + +function color (t, add, mul) { + let a, b, c, d + const rgb = [] + for (var i = 0; i < 3; i++) { + a = palette[0][i] + b = palette[1][i] + c = palette[2][i] + d = palette[3][i] + rgb[i] = Math.round(channel(t, a, b, c, d, add, mul) * 255) + } + return 'rgb(' + rgb + ')' +} + +export default color diff --git a/client/lib/intonation.js b/client/lib/intonation.js new file mode 100644 index 0000000..341baab --- /dev/null +++ b/client/lib/intonation.js @@ -0,0 +1,162 @@ +module.exports = (function(){ + var Intonation = function(opt){ + opt = this.opt = Object.assign({ + name: "", + root: 466.164, + octave: 0, + interval: 2, + tet: 0, + intervals: null, + }, opt || {}) + this.generate() + } + Intonation.prototype.generate = function(opt){ + opt = Object.assign(this.opt, opt || {}) + if (opt.scl) { + this.generate_scl() + } + else if (opt.tet) { + this.generate_tet() + } + else if (opt.intervals) { + this.generate_intervals() + } + } + Intonation.prototype.generate_intervals = function(){ + var root = this.opt.root + var interval_list = this.opt.intervals + if (typeof interval_list == "string") { + interval_list = interval_list.split(" ") + } + this.name = this.opt.name || "interval list" + this.intervals = interval_list + this.interval = this.opt.interval = parseInterval.call(this, interval_list.pop() ) + this.scale = interval_list.map( parseIntervalString.bind(this) ).filter(function(v){ + return !! 
v + }) + } + Intonation.prototype.generate_tet = function(){ + var scale = this.scale = [] + var root = this.opt.root + var tet = this.opt.tet + var interval = this.interval = this.opt.interval + var ratio = Math.pow( interval, 1/tet ) + var n = root + scale.push(n) + for (var i = 0; i < tet-1; i++) { + n *= ratio + scale.push(n) + } + this.name = this.opt.name || tet + "-tone equal temperament" + this.intervals = null + } + Intonation.prototype.generate_scl = function(){ + var root = this.opt.root + var scl = this.parse_scl( this.opt.scl ) + this.intervals = scl.notes + this.interval = scl.notes.pop() + this.name = this.opt.name || scl.description + this.scale = scl.notes.map(function(v){ + return v * root + }) + } + Intonation.prototype.parse_scl = function(s){ + var scl = {} + scl.comments = [] + scl.notes = [] + s.trim().split("\n").forEach(function(line){ + // Lines beginning with an exclamation mark are regarded as comments + // and are to be ignored. + if ( line.indexOf("!") !== -1 ) { + scl.comments.push(line) + } + // The first (non comment) line contains a short description of the scale. + // If there is no description, there should be an empty line. (nb: which is falsey) + else if ( ! ('description' in scl) ) { + scl.description = line + } + // The second line contains the number of notes. + // The first note of 1/1 or 0.0 cents is implicit and not in the files. + else if ( ! scl.notes.length) { + scl.notes.push(1) + } + else { + // If the value contains a period, it is a cents value, otherwise a ratio. + var note = line.replace(/^[^-\.0-9]+/,"").replace(/[^-\/\.0-9]+$/,"") + if ( note.indexOf(".") !== -1 ) { + note = Math.pow( 2, (parseFloat(note) / 1200) ) + } + else { + note = parseInterval(note) + } + if (note) { + scl.notes.push(note) + } + } + }) + return scl + } + Intonation.prototype.index = function(i, octave){ + octave = octave || this.opt.octave + var f = this.scale[ mod(i, this.scale.length)|0 ] + var pow = Math.floor(norm(i, 0, this.scale.length)) + octave + f *= Math.pow(this.interval, pow) + return f + } + Intonation.prototype.range = function(min, max){ + var a = [] + for (var i = min; i < max; i++) { + a.push( this.index(i) ) + } + return a + } + Intonation.prototype.set_root = function(f){ + this.opt.root = f + this.generate() + } + Intonation.prototype.quantize_frequency = function(f){ + if (f == 0) return 0 + var scale_f = f + var pow = 0 + var interval = this.interval + var scale = this.scale + while (scale_f < root) { + scale_f *= interval + pow -= 1 + } + while (scale_f > root * interval) { + scale_f /= interval + pow += 1 + } + for (var i = 0; i < scale.length; i++) { + if (scale_f > scale[i]) continue + scale_f = scale[i] + break + } + scale_f *= Math.pow(2, pow) + return scale_f + } + Intonation.prototype.quantize_index = function(i){ + return mod(index-1, this.scale.length)|0 + } + var parseInterval = Intonation.prototype.parse_interval = function (s) { + if (typeof s == "number") return s + if (! 
s.indexOf("/") == -1) return parseInt(s) + var pp = s.split("/") + var num = parseInt(pp[0]) + var den = parseInt(pp[1]) + if (isNaN(num)) return 1 + if (isNaN(den) || den == 0) return num + if (num == den) return 1 + return num / den + } + var parseIntervalString = Intonation.prototype.parse_interval_string = function(s){ + if (s.indexOf("/") !== -1) return parseInterval(s) * this.opt.root // intervals + if (s.indexOf("f") !== -1) return parseFloat(s) // pure frequencies + return parseFloat(s) + } + function norm(n,a,b){ return (n-a) / (b-a) } + function mod(n,m){ return n-(m * Math.floor(n/m)) } + + return Intonation +})() diff --git a/client/lib/kalimba.js b/client/lib/kalimba.js new file mode 100644 index 0000000..1401dc4 --- /dev/null +++ b/client/lib/kalimba.js @@ -0,0 +1,50 @@ +import Tone from 'tone' +import { choice } from './util' + +const player_count = 2 +let sample_index = 0 + +const compressor = new Tone.Compressor(-30, 3).toMaster() + +const samples = [ + { root: 226, fn: 'samples/380737__cabled-mess__sansula-01-a-raw.mp3', }, + { root: 267, fn: 'samples/380736__cabled-mess__sansula-02-c-raw.mp3', }, + { root: 340, fn: 'samples/380735__cabled-mess__sansula-03-e-raw.mp3', }, + { root: 452, fn: 'samples/380733__cabled-mess__sansula-06-a-02-raw.mp3', }, +// { root: 507, fn: 'samples/380734__cabled-mess__sansula-07-b-h-raw.mp3', }, +// { root: 535, fn: 'samples/380731__cabled-mess__sansula-08-c-raw.mp3', }, +// { root: 671, fn: 'samples/380732__cabled-mess__sansula-09-e-raw.mp3', }, +] + +samples.forEach((sample) => { + sample.players = [] + sample.index = -1 + for (let i = 0; i < player_count; i++) { + let fn = sample.fn + if (window.location.href.match(/asdf.us/)) { + fn = '//asdf.us/kalimba/' + fn + } + let player = new Tone.Player({ + url: fn, + retrigger: true, + playbackRate: 1, + }) + player.connect(compressor) + sample.players.push(player) + } +}) + +function play (freq, volume = 0.0) { + const best = { sample: samples[sample_index] } + sample_index = (sample_index + 1) % samples.length + best.sample.index = (best.sample.index + 1) % player_count + + const player = best.sample.players[ best.sample.index ] + player.playbackRate = freq / best.sample.root + // console.log(player) + player.volume.value = volume + setTimeout(() => { player.start() }, 0) +} + +export default { play } + diff --git a/client/lib/keys.js b/client/lib/keys.js new file mode 100644 index 0000000..c9e51ac --- /dev/null +++ b/client/lib/keys.js @@ -0,0 +1,39 @@ +const keys = {} +const key_numbers = {} +const letters = "zxcvbnmasdfghjklqwertyuiop" +const numbers = "1234567890" + +let callback = function(){} + +letters.toUpperCase().split("").map(function(k,i){ + keys[k.charCodeAt(0)] = i +}) + +numbers.split("").map(function(k,i){ + keys[k.charCodeAt(0)] = i+letters.length + key_numbers[k.charCodeAt(0)] = true +}) + +window.addEventListener("keydown", keydown, true) +function keydown (e) { + if (e.altKey || e.ctrlKey || e.metaKey) { + e.stopPropagation() + return + } + if (document.activeElement instanceof HTMLInputElement && + (e.keyCode in key_numbers)) { + e.stopPropagation() + return + } + if (! (e.keyCode in keys)) return + var index = keys[e.keyCode] + if (e.shiftKey) index += letters.length + index -= 7 + callback(index) +} + +function listen (fn) { + callback = fn +} + +export default { listen }
\ No newline at end of file diff --git a/client/lib/midi.js b/client/lib/midi.js new file mode 100644 index 0000000..99cbc48 --- /dev/null +++ b/client/lib/midi.js @@ -0,0 +1,197 @@ +import Tone from 'tone' +import WebMidi from 'webmidi' +import scales from './scales' +import { ftom, norm, dataURItoBlob } from './util' +import kalimba from './kalimba' +import { saveAs } from 'file-saver/FileSaver' + +import { nx } from './ui' + +let midiDevice +let sendPitchBend = false + +export const MidiWriter = require('midi-writer-js') + +export const note_values = [ + [8, '8 measures', 8 * 512], + [4, '4 measures', 4 * 512], + [2, '2 measures', 2 * 512], + [1, 'whole note', 512], + [1/2, 'half note', 256], + [1/3, 'third note', [170, 170, 171]], + [1/4, 'quarter note', 128], + [1/5, 'fifth note', [51,51,51,51,52]], + [1/6, 'sixth note', [85, 85, 86, 85, 85, 86]], + [1/8, 'eighth note', 64], + [1/10, 'tenth note', [25,26,26,25,26,25,26,26,25,26]], + [1/12, 'twelfth note', [21,21,22, 21,21,22, 21,21,22, 21,21,22]], + [1/16, 'sixteenth note', 32], + [1/32, 'thirtysecond note', 16], +] + +export function midi_init() { + WebMidi.enable(midi_ready) + function midi_ready(err) { + if (err) { + console.error('webmidi failed to initialize') + return + } + if (!WebMidi.outputs.length) { + console.error('no MIDI output found') + return + } + console.log(WebMidi.inputs) + console.log(WebMidi.outputs) + if (WebMidi.outputs.length > 1) { + const filtered = WebMidi.outputs.filter(output => output.name.match(/prodipe/i)) + if (filtered.length) { + // midiDevice = filtered[0] + } + } + // midiDevice = midiDevice || WebMidi.outputs[0] + // console.log(midiDevice.name) + } +} + +/* play a single note */ + +export function play_note(index, duration, channel="all", exporting=false, defer=0){ + // console.log(index) + const scale = scales.current() + const freq = scale.index(index + Math.round(nx.offset.value), nx.octave.value) + let midi_note = ftom(freq) + let cents = midi_note % 1 + if (cents > 0.5) { + midi_note += 1 + cents -= 1 + } + cents *= 2 + midi_note = Math.floor(midi_note) + if ((midiDevice || exporting) && midi_note > 127) return 0 + const note = Tone.Frequency(Math.floor(midi_note), "midi").toNote() + const defer_time = 30000 / Tone.Transport.bpm.value * defer / 128 + if (exporting) { + return note + } + // console.log('defer', defer, defer_time) + if (midiDevice) { + duration = duration || 60000 / Tone.Transport.bpm.value + if (! 
exporting) { + if (defer) { + setTimeout(() => { + play_midi_note(note, cents, channel, duration) + }, defer) + } else { + play_midi_note(note, cents, channel, duration) + } + } + } + else if (defer) { + setTimeout(() => { + kalimba.play(freq) + }, defer_time) + } else { + kalimba.play(freq) + } + return note +} + +export function play_midi_note(note, cents, channel, duration) { + midiDevice.playNote(note, channel, { duration }) + if (sendPitchBend) { + midiDevice.sendPitchBend(cents, channel) + } +} + +/* play the next note in sequence */ + +export function play_sequence(i, bounds, diff, note_time, channel="all", exporting) { + const { rows, min, max } = bounds + const count = rows.length * rows[0].length + if (i >= count) i = 0 + const y = Math.floor(i / rows[0].length) + const x = i % rows[0].length + // if (!x) console.log(y) + const n = rows[y][x] + i += 1 + if (i >= count) i = 0 + const midi_note = play_note( norm(n, min, max) * nx.multiply.value, note_time, channel, exporting) + return [i, [midi_note], [128]] +} +// [next_i, notes, timings, pedal_note] + +/* play the next row as an interval */ + +export function play_interval_sequence(i, bounds, diff, note_time, channel="all", exporting) { + const { rows, min, max } = bounds + const count = rows.length + if (i >= count) i = 0 + const y = i % count + const row = rows[y] + if (! row) { i = 0; return } + const row_min = Math.min.apply(Math, row) + // const row_max = Math.max.apply(Math, row) + const row_f0 = norm(row_min, min, max) + const row_root = row_f0 * nx.multiply.value + const notes = row.map(n => { + const note = row_root + norm(n - row_min, diff.min, diff.max) * nx.interval.value + play_note(note, note_time, channel, exporting) + }) + i += 1 + return [i, notes] +} + +/* generate a 1-track midi file by calling the play function repeatedly */ + +export function export_pattern_as_midi(dataset, bounds, diff, tempo, timingIndex, play_fn, max_i) { + // const behavior = document.querySelector('#behavior').value + const { rows } = bounds + // let count = behavior === 'sequence' ? rows[0].length * rows.length : rows.length + max_i = max_i || rows[0].length + let notes, timings + let note_time + // let timing = note_values[timingIndex][2] + let pedal_note, pedal_track, next_i + let wait = 0 + let midi_track = new MidiWriter.Track() + midi_track.setTempo(tempo) + if (dataset.pedal) { + pedal_track = new MidiWriter.Track() + pedal_track.setTempo(tempo) + } + for (let i = 0, len = max_i; i < len; i++) { + [next_i, notes, timings, pedal_note] = play_fn(i, bounds, diff, note_time, "all", true) + // if (timing.length) { + // note_time = timing[i % timing.length] + // } else { + // note_time = timing + // } + // midi_track.addEvent(new MidiWriter.NoteEvent({ pitch: notes, duration: 't' + note_time })) + // console.log(i, notes, timings) + if (!notes.length) wait += 128 + for (let j = 0; j < notes.length; j++) { + console.log(i, j, notes[j], timings[j], wait, pedal_note) + let e = { + pitch: notes[j], + duration: 't' + timings[j], + velocity: 50, + } + if (wait) { + e.wait = 't' + wait + } + midi_track.addEvent(new MidiWriter.NoteEvent(e)) + wait = 0 + } + if (dataset.pedal) { + pedal_track.addEvent(new MidiWriter.NoteEvent({ + pitch: pedal_note, + duration: 't128', + velocity: 25, + })) + } + } + let tracks = dataset.pedal ? 
[midi_track, pedal_track] : [midi_track] + const writer = new MidiWriter.Writer(tracks) + const blob = dataURItoBlob(writer.dataUri()) + saveAs(blob, 'Recording - ' + dataset.name + '.mid') +} diff --git a/client/lib/scales.js b/client/lib/scales.js new file mode 100644 index 0000000..d85fe08 --- /dev/null +++ b/client/lib/scales.js @@ -0,0 +1,299 @@ +import Intonation from './intonation' + +const meantone = `! meanquar.scl +! +1/4-comma meantone scale. Pietro Aaron's temperament (1523) + 12 +! + 76.04900 + 193.15686 + 310.26471 + 5/4 + 503.42157 + 579.47057 + 696.57843 + 25/16 + 889.73529 + 1006.84314 + 1082.89214 + 2/1 +` + +const shares = `! shares.scl +! +A scale based on shares of wealth +! +1. +5. +15. +32. +52. +78. +116. +182. +521. +1000. +` + +const shares_sum = `! shares_sum.scl +! +A scale based on summing shares of wealth +! +1 +6.0 +21.0 +53.0 +105.0 +183.0 +299.0 +481.0 +1002.0 +2/1 +` + +const mavila = `! mavila12.scl +! +A 12-note mavila scale (for warping meantone-based music), 5-limit TOP + 12 +! +-30.99719 + 163.50770 + 358.01258 + 327.01540 + 521.52028 + 490.52310 + 685.02798 + 654.03080 + 848.53568 + 1043.04057 + 1012.04338 + 1206.54826 +` + +const carlos_alpha = `! carlos_alpha.scl +! +Wendy Carlos' Alpha scale with perfect fifth divided in nine + 18 +! + 78.00000 + 156.00000 + 234.00000 + 312.00000 + 390.00000 + 468.00000 + 546.00000 + 624.00000 + 702.00000 + 780.00000 + 858.00000 + 936.00000 + 1014.00000 + 1092.00000 + 1170.00000 + 1248.00000 + 1326.00000 + 1404.00000 +` + +const lamonte = `! young-lm_piano.scl +! +LaMonte Young's Well-Tempered Piano +12 +! +567/512 +9/8 +147/128 +21/16 +1323/1024 +189/128 +3/2 +49/32 +7/4 +441/256 +63/32 +2/1 +` + +const colundi = `! colundi.scl +! +Colundi scale +10 +! +9/8 +171/140 +137/112 +43/35 +3/2 +421/280 +213/140 +263/150 +66/35 +2/1 +` + +const liu_major = `! liu_major.scl +! +Linus Liu's Major Scale, see his 1978 book, "Intonation Theory" + 7 +! + 10/9 + 100/81 + 4/3 + 3/2 + 5/3 + 50/27 + 2/1 +` +const liu_pentatonic = `! liu_pent.scl +! +Linus Liu's "pentatonic scale" + 7 +! + 9/8 + 81/64 + 27/20 + 3/2 + 27/16 + 243/128 + 81/40 +` + +const liu_minor = `! LIU_MINor.scl +! +Linus Liu's Harmonic Minor + 7 +! + 10/9 + 6/5 + 4/3 + 40/27 + 8/5 + 50/27 + 2/1 +` + +const liu_melodic_minor = `! liu_mel.scl +! +Linus Liu's Melodic Minor, use 5 and 7 descending and 6 and 8 ascending + 9 +! 
+ 10/9 + 6/5 + 4/3 + 3/2 + 81/50 + 5/3 + 9/5 + 50/27 + 2/1 +` + +const scales = [ + { + intervals: '1/1 9/8 5/4 4/3 3/2 5/3 15/8 2/1', + name: "harmonic scale", + }, + { + root: 450, + intervals: '1/1 9/8 5/4 4/3 3/2 5/3 15/8 2/1', + name: "harmonic scale @ 450", + }, + { + tet: 5, + }, + { + tet: 12, + }, + { + tet: 17, + }, + { + intervals: '1/1 81/80 33/32 21/20 16/15 12/11 11/10 10/9 9/8 8/7 7/6 32/27 6/5 11/9 5/4 14/11 9/7 21/16 4/3 27/20 11/8 7/5 10/7 16/11 40/27 3/2 32/21 14/9 11/7 8/5 18/11 5/3 27/16 12/7 7/4 16/9 9/5 20/11 11/6 15/8 40/21 64/33 160/81 2/1', + name: "harry partch scale", + }, + { + scl: lamonte, + }, + { + scl: meantone, + }, + { + scl: mavila, + }, + { + scl: carlos_alpha, + }, + { + scl: colundi, + }, + { + scl: shares, + }, + { + scl: shares_sum, + }, + { + scl: liu_major, + }, + { + scl: liu_minor, + }, + { + scl: liu_melodic_minor, + }, + { + scl: liu_pentatonic, + } +].map( (opt) => new Intonation(opt) ) + +let scale = scales[0] +let handleChange = function(){} + +function build () { + scales.forEach( (scale, i) => { + scale.heading = document.createElement('div') + scale.heading.innerHTML = scale.name + scale.heading.classList.add('heading') + scale.heading.addEventListener('click', function(){ + pick(i) + }) + scale_list.appendChild(scale.heading) + }) + pick(0) +} +function build_options(el) { + scales.forEach( (scale, i) => { + const option = document.createElement('option') + option.innerHTML = scale.name + option.value = i + el.appendChild(option) + }) + el.addEventListener('input', function(e){ + pick(e.target.value) + }) + pick(0) +} + +function pick (i) { + if (scale) { + scale.heading && scale.heading.classList.remove('selected') + } + scale = scales[i] + scale.heading && scale.heading.classList.add('selected') + handleChange(scale) +} + +function current () { + return scale +} + +function onChange (fn) { + handleChange = fn +} + +function names () { + return scales.map( scale => scale.name ) +} + + +export default { scales, current, build, build_options, pick, names, onChange } diff --git a/client/lib/startAudioContext.js b/client/lib/startAudioContext.js new file mode 100644 index 0000000..f3a9793 --- /dev/null +++ b/client/lib/startAudioContext.js @@ -0,0 +1,181 @@ +/** + * StartAudioContext.js + * @author Yotam Mann + * @license http://opensource.org/licenses/MIT MIT License + * @copyright 2016 Yotam Mann + */ +(function (root, factory) { + if (typeof define === "function" && define.amd) { + define([], factory); + } else if (typeof module === 'object' && module.exports) { + module.exports = factory(); + } else { + root.StartAudioContext = factory(); + } +}(this, function () { + + /** + * The StartAudioContext object + */ + var StartAudioContext = { + /** + * The audio context passed in by the user + * @type {AudioContext} + */ + context : null, + /** + * The TapListeners bound to the elements + * @type {Array} + * @private + */ + _tapListeners : [], + /** + * Callbacks to invoke when the audio context is started + * @type {Array} + * @private + */ + _onStarted : [], + }; + + + /** + * Set the context + * @param {AudioContext} ctx + * @returns {StartAudioContext} + */ + StartAudioContext.setContext = function(ctx){ + StartAudioContext.context = ctx; + return StartAudioContext; + }; + + /** + * Add a tap listener to the audio context + * @param {Array|Element|String|jQuery} element + * @returns {StartAudioContext} + */ + StartAudioContext.on = function(element){ + if (Array.isArray(element) || (NodeList && element instanceof NodeList)){ + for (var 
i = 0; i < element.length; i++){ + StartAudioContext.on(element[i]); + } + } else if (typeof element === "string"){ + StartAudioContext.on(document.querySelectorAll(element)); + } else if (element.jquery && typeof element.toArray === "function"){ + StartAudioContext.on(element.toArray()); + } else if (Element && element instanceof Element){ + //if it's an element, create a TapListener + var tap = new TapListener(element, onTap); + StartAudioContext._tapListeners.push(tap); + } + return StartAudioContext; + }; + + /** + * Bind a callback to when the audio context is started. + * @param {Function} cb + * @return {StartAudioContext} + */ + StartAudioContext.onStarted = function(cb){ + //if it's already started, invoke the callback + if (StartAudioContext.isStarted()){ + cb(); + } else { + StartAudioContext._onStarted.push(cb); + } + return StartAudioContext; + }; + + /** + * returns true if the context is started + * @return {Boolean} + */ + StartAudioContext.isStarted = function(){ + return (StartAudioContext.context !== null && StartAudioContext.context.state === "running"); + }; + + /** + * @class Listens for non-dragging tap ends on the given element + * @param {Element} element + * @internal + */ + var TapListener = function(element){ + + this._dragged = false; + + this._element = element; + + this._bindedMove = this._moved.bind(this); + this._bindedEnd = this._ended.bind(this); + + element.addEventListener("touchmove", this._bindedMove); + element.addEventListener("touchend", this._bindedEnd); + element.addEventListener("mouseup", this._bindedEnd); + }; + + /** + * drag move event + */ + TapListener.prototype._moved = function(e){ + this._dragged = true; + }; + + /** + * tap ended listener + */ + TapListener.prototype._ended = function(e){ + if (!this._dragged){ + onTap(); + } + this._dragged = false; + }; + + /** + * remove all the bound events + */ + TapListener.prototype.dispose = function(){ + this._element.removeEventListener("touchmove", this._bindedMove); + this._element.removeEventListener("touchend", this._bindedEnd); + this._element.removeEventListener("mouseup", this._bindedEnd); + this._bindedMove = null; + this._bindedEnd = null; + this._element = null; + }; + + /** + * Invoked the first time of the elements is tapped. + * Creates a silent oscillator when a non-dragging touchend + * event has been triggered. 
+ */ + function onTap(){ + //start the audio context with a silent oscillator + if (StartAudioContext.context && !StartAudioContext.isStarted()){ + var osc = StartAudioContext.context.createOscillator(); + var silent = StartAudioContext.context.createGain(); + silent.gain.value = 0; + osc.connect(silent); + silent.connect(StartAudioContext.context.destination); + var now = StartAudioContext.context.currentTime; + osc.start(now); + osc.stop(now+0.5); + } + + //dispose all the tap listeners + if (StartAudioContext._tapListeners){ + for (var i = 0; i < StartAudioContext._tapListeners.length; i++){ + StartAudioContext._tapListeners[i].dispose(); + } + StartAudioContext._tapListeners = null; + } + //the onstarted callbacks + if (StartAudioContext._onStarted){ + for (var j = 0; j < StartAudioContext._onStarted.length; j++){ + StartAudioContext._onStarted[j](); + } + StartAudioContext._onStarted = null; + } + } + + return StartAudioContext; +})); + + diff --git a/client/lib/ui.js b/client/lib/ui.js new file mode 100644 index 0000000..413ff1c --- /dev/null +++ b/client/lib/ui.js @@ -0,0 +1,50 @@ +import Nexus from 'nexusui' + +export const nx = window.nx = {} + +/* ui - update an int/float value */ + +export function update_value_on_change(el, id, is_int, fn) { + const label = document.querySelector(id + ' + .val') + const update = v => { + label.innerHTML = is_int ? parseInt(v) : v.toFixed(2) + fn && fn(v) + } + el.on('change', update) + update(el.value) + el.update = update +} + +/* ui - update a radio button */ + +export function update_radio_value_on_change(el, id, values, fn) { + let old_v = el.active + const label = document.querySelector(id + ' + .val') + const update = v => { + if (v === -1) { + v = el.active = old_v + } else { + old_v = v + } + label.innerHTML = values[v][1] + fn && fn(v) + } + el.on('change', update) + update(el.active) + el.update = update +} + +/* ui - bind/build a select dropdown */ + +export function build_options(el, lists, fn) { + Object.keys(lists).forEach(key => { + const list = lists[key] + const option = document.createElement('option') + option.innerHTML = list.name + option.value = key + el.appendChild(option) + }) + el.addEventListener('input', function(e){ + fn(e.target.value) + }) +} diff --git a/client/lib/util.js b/client/lib/util.js new file mode 100644 index 0000000..c2483eb --- /dev/null +++ b/client/lib/util.js @@ -0,0 +1,139 @@ +export const isIphone = (navigator.userAgent.match(/iPhone/i)) || (navigator.userAgent.match(/iPod/i)) +export const isIpad = (navigator.userAgent.match(/iPad/i)) +export const isAndroid = (navigator.userAgent.match(/Android/i)) +export const isMobile = isIphone || isIpad || isAndroid +export const isDesktop = ! isMobile + +document.body.classList.add(isMobile ? 
'mobile' : 'desktop') + +export const browser = { isIphone, isIpad, isMobile, isDesktop } + +export function choice (a){ return a[ Math.floor(Math.random() * a.length) ] } +export function mod(n,m){ return n-(m * Math.floor(n/m)) } +export function norm(n, min, max){ return (n - min) / (max - min) } + +export function requestAudioContext (fn) { + if (isMobile) { + const container = document.createElement('div') + const button = document.createElement('div') + button.innerHTML = 'Tap to start - please unmute your phone' + Object.assign(container.style, { + position: 'absolute', + width: '100%', + height: '100%', + zIndex: '10000', + top: '0px', + left: '0px', + backgroundColor: 'rgba(0, 0, 0, 0.8)', + }) + Object.assign(button.style, { + position: 'absolute', + left: '50%', + top: '50%', + padding: '20px', + backgroundColor: '#7F33ED', + color: 'white', + fontFamily: 'monospace', + borderRadius: '3px', + transform: 'translate3D(-50%,-50%,0)', + textAlign: 'center', + lineHeight: '1.5', + }) + container.appendChild(button) + document.body.appendChild(container) + StartAudioContext.setContext(Tone.context) + StartAudioContext.on(button) + StartAudioContext.onStarted(_ => { + container.remove() + fn() + }) + } else { + fn() + } +} + +export function dataURItoBlob(dataURI) { + // convert base64 to raw binary data held in a string + // doesn't handle URLEncoded DataURIs - see SO answer #6850276 for code that does this + var byteString = atob(dataURI.split(',')[1]); + + // separate out the mime component + var mimeString = dataURI.split(',')[0].split(':')[1].split(';')[0] + + // write the bytes of the string to an ArrayBuffer + var ab = new ArrayBuffer(byteString.length); + + // create a view into the buffer + var ia = new Uint8Array(ab); + + // set the bytes of the buffer to the correct values + for (var i = 0; i < byteString.length; i++) { + ia[i] = byteString.charCodeAt(i); + } + + // write the ArrayBuffer to a blob, and you're done + var blob = new Blob([ab], {type: mimeString}); + return blob; + +} +export function ftom(f) { + // return (Math.log(f) - Math.log(261.626)) / Math.log(2) + 4.0 + return 69 + 12 * Math.log2(f / 440) +} +export function mtof(m) { + return 440 * Math.pow(2, (m - 69) / 12) +} +export function tap (fn) { + return (e) => { + if (browser.isMobile) fn() + else if (e.press) fn() + } +} + +/* get minimum and maximum variance from row-to-row */ + +export function get_diff_bounds(rows){ + const diffs = rows.map(row => { + const row_min = Math.min.apply(Math, row) + const row_max = Math.max.apply(Math, row) + return row_max - row_min + }) + const min = Math.min.apply(Math, diffs) + const max = Math.max.apply(Math, diffs) + return { min, max } +} + +/* get minimum and maximum values from a dataset */ + +export function get_bounds(dataset){ + let rows = dataset.lines + // rows.forEach(row => row.shift()) + rows = rows.map(a => a.map(n => parseFloat(n))) + const max = rows.reduce((a,b) => { + return b.reduce((z,bb) => { + return Math.max(z, bb) + }, a) + }, -Infinity) + const min = rows.reduce((a,b) => { + return b.reduce((z,bb) => { + return Math.min(z, bb) + }, a) + }, Infinity) + return { rows, max, min } +} + +/* transpose a 2D array */ + +export function transpose(a) { + let i_len = a[0].length + let j_len = a.length + let T = new Array(i_len) + for (let i = 0; i < i_len; i++) { + T[i] = new Array(j_len) + for (var j = 0; j < j_len; j++) { + T[i][j] = a[j][i] + } + } + return T +} + diff --git a/client/old_index.js b/client/old_index.js new file mode 100644 index 
0000000..a01339c --- /dev/null +++ b/client/old_index.js @@ -0,0 +1,381 @@ +/* play function for mass shooting data w/ custom timing */ + +// export const note_values = [ +// [8, '8 measures', 8 * 512], +// [4, '4 measures', 4 * 512], +// [2, '2 measures', 2 * 512], +// [1, 'whole note', 512], +// [1/2, 'half note', 256], +// [1/3, 'third note', [170, 170, 171]], +// [1/4, 'quarter note', 128], +// [1/5, 'fifth note', [51,51,51,51,52]], +// [1/6, 'sixth note', [85, 85, 86, 85, 85, 86]], +// [1/8, 'eighth note', 64], +// [1/10, 'tenth note', [25,26,26,25,26,25,26,26,25,26]], +// [1/12, 'twelfth note', [21,21,22, 21,21,22, 21,21,22, 21,21,22]], +// [1/16, 'sixteenth note', 32], +// [1/32, 'thirtysecond note', 16], +// ] + +function play_mass_shootings(i, bounds, diff, note_time, channel="all", exporting) { + const { min, max } = bounds + const total = dataset.dates.length + let pedal_note + let notes = [], midi_notes = [] + let cases = [] + let timings + let week = Math.floor((i)/4) % 4 + let year = Math.floor((i - (4*4*3)) / 48) // + 2 + console.log(year) + let yy = -year + if (year > 0) year += 1 + let this_one = 0 + // console.log(i, mass_i, dataset.dates[mass_i], channel, exporting) + while (i >= dataset.dates[mass_i] && mass_i < total) { + // console.log(i, dataset.dates[mass_i]) + notes.push(dataset.lines[0][mass_i]) + if (dataset.isMass) { + cases.push(dataset.data[mass_i][mass_fields.date] + ' ' + dataset.data[mass_i][mass_fields.case] + + ", " + dataset.data[mass_i][mass_fields.fatalities] + ' dead, ' + dataset.data[mass_i][mass_fields.injured] + ' injured') + } else { + cases.push(dataset.labels[mass_i]) + // console.log(dataset.labels[mass_i]) + } + // console.log('push case', dataset.data[mass_i][mass_fields.date] + ' ' + dataset.data[mass_i][mass_fields.case]) + mass_i += 1 + this_one += 1 + if (this_one >= 4) break + } + switch (notes.length) { + default: + case 0: + break + case 1: + midi_notes.push(play_note( norm(notes[0], min, max) * nx.multiply.value + yy, 128, channel, exporting, 0)) + timings = [128] + break + case 2: + midi_notes.push(play_note( norm(notes[0], min, max) * nx.multiply.value + yy, 64, channel, exporting, 0)) + midi_notes.push(play_note( norm(notes[1], min, max) * nx.multiply.value + yy, 64, channel, exporting, 64)) + timings = [64, 64] + break + case 3: + midi_notes.push(play_note( norm(notes[0], min, max) * nx.multiply.value + yy, 43, channel, exporting, 0)) + midi_notes.push(play_note( norm(notes[1], min, max) * nx.multiply.value + yy, 43, channel, exporting, 43)) + midi_notes.push(play_note( norm(notes[2], min, max) * nx.multiply.value + yy, 42, channel, exporting, 85)) + timings = [43, 43 ,42] + break + case 4: + midi_notes.push(play_note( norm(notes[0], min, max) * nx.multiply.value + yy, 32, channel, exporting, 0)) + midi_notes.push(play_note( norm(notes[1], min, max) * nx.multiply.value + yy, 32, channel, exporting, 32)) + midi_notes.push(play_note( norm(notes[2], min, max) * nx.multiply.value + yy, 32, channel, exporting, 64)) + midi_notes.push(play_note( norm(notes[3], min, max) * nx.multiply.value + yy, 32, channel, exporting, 96)) + timings = [32, 32, 32, 32] + break + case 5: + midi_notes.push(play_note( norm(notes[0], min, max) * nx.multiply.value + yy, 22, channel, exporting, 0)) + midi_notes.push(play_note( norm(notes[1], min, max) * nx.multiply.value + yy, 21, channel, exporting, 22)) + midi_notes.push(play_note( norm(notes[2], min, max) * nx.multiply.value + yy, 21, channel, exporting, 43)) + midi_notes.push(play_note( norm(notes[3], min, 
max) * nx.multiply.value + yy, 32, channel, exporting, 64)) + midi_notes.push(play_note( norm(notes[4], min, max) * nx.multiply.value + yy, 32, channel, exporting, 96)) + timings = [22, 21, 21, 32, 32] + break + case 6: + midi_notes.push(play_note( norm(notes[0], min, max) * nx.multiply.value + yy, 22, channel, exporting, 0)) + midi_notes.push(play_note( norm(notes[1], min, max) * nx.multiply.value + yy, 21, channel, exporting, 22)) + midi_notes.push(play_note( norm(notes[2], min, max) * nx.multiply.value + yy, 21, channel, exporting, 43)) + midi_notes.push(play_note( norm(notes[3], min, max) * nx.multiply.value + yy, 22, channel, exporting, 64)) + midi_notes.push(play_note( norm(notes[4], min, max) * nx.multiply.value + yy, 21, channel, exporting, 86)) + midi_notes.push(play_note( norm(notes[5], min, max) * nx.multiply.value + yy, 21, channel, exporting, 107)) + timings = [22, 21, 21, 22, 21, 21] + break + case 7: + midi_notes.push(play_note( norm(notes[0], min, max) * nx.multiply.value, 16, channel, exporting, 0)) + midi_notes.push(play_note( norm(notes[1], min, max) * nx.multiply.value, 16, channel, exporting, 16)) + midi_notes.push(play_note( norm(notes[2], min, max) * nx.multiply.value, 16, channel, exporting, 32)) + midi_notes.push(play_note( norm(notes[3], min, max) * nx.multiply.value, 16, channel, exporting, 48)) + midi_notes.push(play_note( norm(notes[4], min, max) * nx.multiply.value, 22, channel, exporting, 64)) + midi_notes.push(play_note( norm(notes[5], min, max) * nx.multiply.value, 21, channel, exporting, 86)) + midi_notes.push(play_note( norm(notes[6], min, max) * nx.multiply.value, 21, channel, exporting, 107)) + timings = [16, 16, 16, 16, 22, 21, 21] + break + case 8: + midi_notes.push(play_note( norm(notes[0], min, max) * nx.multiply.value, 16, channel, exporting, 0)) + midi_notes.push(play_note( norm(notes[1], min, max) * nx.multiply.value, 16, channel, exporting, 16)) + midi_notes.push(play_note( norm(notes[2], min, max) * nx.multiply.value, 16, channel, exporting, 32)) + midi_notes.push(play_note( norm(notes[3], min, max) * nx.multiply.value, 16, channel, exporting, 48)) + midi_notes.push(play_note( norm(notes[4], min, max) * nx.multiply.value, 16, channel, exporting, 64)) + midi_notes.push(play_note( norm(notes[5], min, max) * nx.multiply.value, 16, channel, exporting, 80)) + midi_notes.push(play_note( norm(notes[6], min, max) * nx.multiply.value, 16, channel, exporting, 96)) + midi_notes.push(play_note( norm(notes[7], min, max) * nx.multiply.value, 16, channel, exporting, 112)) + timings = [16, 16, 16, 16, 16, 16, 16, 16] + break + case 9: + midi_notes.push(play_note( norm(notes[0], min, max) * nx.multiply.value, 11, channel, exporting, 0)) + midi_notes.push(play_note( norm(notes[1], min, max) * nx.multiply.value, 11, channel, exporting, 11)) + midi_notes.push(play_note( norm(notes[2], min, max) * nx.multiply.value, 10, channel, exporting, 22)) + midi_notes.push(play_note( norm(notes[3], min, max) * nx.multiply.value, 16, channel, exporting, 32)) + midi_notes.push(play_note( norm(notes[4], min, max) * nx.multiply.value, 16, channel, exporting, 48)) + midi_notes.push(play_note( norm(notes[5], min, max) * nx.multiply.value, 16, channel, exporting, 64)) + midi_notes.push(play_note( norm(notes[6], min, max) * nx.multiply.value, 16, channel, exporting, 80)) + midi_notes.push(play_note( norm(notes[7], min, max) * nx.multiply.value, 16, channel, exporting, 96)) + midi_notes.push(play_note( norm(notes[8], min, max) * nx.multiply.value, 16, channel, exporting, 112)) + 
timings = [11, 11, 10, 16, 16, 16, 16, 16, 16] + break + case 10: + midi_notes.push(play_note( norm(notes[0], min, max) * nx.multiply.value, 11, channel, exporting, 0)) + midi_notes.push(play_note( norm(notes[1], min, max) * nx.multiply.value, 11, channel, exporting, 11)) + midi_notes.push(play_note( norm(notes[2], min, max) * nx.multiply.value, 10, channel, exporting, 22)) + midi_notes.push(play_note( norm(notes[3], min, max) * nx.multiply.value, 11, channel, exporting, 32)) + midi_notes.push(play_note( norm(notes[4], min, max) * nx.multiply.value, 11, channel, exporting, 43)) + midi_notes.push(play_note( norm(notes[5], min, max) * nx.multiply.value, 10, channel, exporting, 54)) + midi_notes.push(play_note( norm(notes[6], min, max) * nx.multiply.value, 16, channel, exporting, 64)) + midi_notes.push(play_note( norm(notes[7], min, max) * nx.multiply.value, 16, channel, exporting, 80)) + midi_notes.push(play_note( norm(notes[8], min, max) * nx.multiply.value, 16, channel, exporting, 96)) + midi_notes.push(play_note( norm(notes[9], min, max) * nx.multiply.value, 16, channel, exporting, 112)) + timings = [11, 11, 10, 11, 11, 10, 16, 16, 16, 16] + break + case 11: + midi_notes.push(play_note( norm(notes[0], min, max) * nx.multiply.value, 11, channel, exporting, 0)) + midi_notes.push(play_note( norm(notes[1], min, max) * nx.multiply.value, 11, channel, exporting, 11)) + midi_notes.push(play_note( norm(notes[2], min, max) * nx.multiply.value, 10, channel, exporting, 22)) + midi_notes.push(play_note( norm(notes[3], min, max) * nx.multiply.value, 11, channel, exporting, 32)) + midi_notes.push(play_note( norm(notes[4], min, max) * nx.multiply.value, 11, channel, exporting, 43)) + midi_notes.push(play_note( norm(notes[5], min, max) * nx.multiply.value, 10, channel, exporting, 54)) + midi_notes.push(play_note( norm(notes[6], min, max) * nx.multiply.value, 11, channel, exporting, 64)) + midi_notes.push(play_note( norm(notes[7], min, max) * nx.multiply.value, 11, channel, exporting, 75)) + midi_notes.push(play_note( norm(notes[8], min, max) * nx.multiply.value, 10, channel, exporting, 86)) + midi_notes.push(play_note( norm(notes[9], min, max) * nx.multiply.value, 16, channel, exporting, 96)) + midi_notes.push(play_note( norm(notes[10], min, max) * nx.multiply.value, 16, channel, exporting, 112)) + timings = [11, 11, 10, 11, 11, 10, 11, 11, 10, 16, 16] + break + case 12: + midi_notes.push(play_note( norm(notes[0], min, max) * nx.multiply.value, 11, channel, exporting, 0)) + midi_notes.push(play_note( norm(notes[1], min, max) * nx.multiply.value, 11, channel, exporting, 11)) + midi_notes.push(play_note( norm(notes[2], min, max) * nx.multiply.value, 10, channel, exporting, 22)) + midi_notes.push(play_note( norm(notes[3], min, max) * nx.multiply.value, 11, channel, exporting, 32)) + midi_notes.push(play_note( norm(notes[4], min, max) * nx.multiply.value, 11, channel, exporting, 43)) + midi_notes.push(play_note( norm(notes[5], min, max) * nx.multiply.value, 10, channel, exporting, 54)) + midi_notes.push(play_note( norm(notes[6], min, max) * nx.multiply.value, 11, channel, exporting, 64)) + midi_notes.push(play_note( norm(notes[7], min, max) * nx.multiply.value, 11, channel, exporting, 75)) + midi_notes.push(play_note( norm(notes[8], min, max) * nx.multiply.value, 10, channel, exporting, 86)) + midi_notes.push(play_note( norm(notes[9], min, max) * nx.multiply.value, 11, channel, exporting, 96)) + midi_notes.push(play_note( norm(notes[10], min, max) * nx.multiply.value, 11, channel, exporting, 107)) + 
midi_notes.push(play_note( norm(notes[11], min, max) * nx.multiply.value, 10, channel, exporting, 118)) + timings = [11, 11, 10, 11, 11, 10, 11, 11, 10, 11, 11, 10] + break + } + const pedal_freq = scales.current().index(nx.pedal_tone.value - week + 4 - year, nx.octave.value) + pedal_note = get_midi_note_for_frequency(pedal_freq) + if (!exporting) { + kalimba.play(pedal_freq, -12) + if (cases.length) { + document.querySelector('#cases').innerHTML = cases.join('<br>') + } + } + if (total <= mass_i) { + mass_i = 0 + i = 0 + } else { + i += 1 + } + + if (notes.length) { + return [i, midi_notes, timings, pedal_note] + } + return [i, [], [], pedal_note] +} + +function get_midi_note_for_frequency(freq){ + let midi_note = ftom(freq) + let cents = midi_note % 1 + if (cents > 0.5) { + midi_note += 1 + cents -= 1 + } + cents *= 2 + midi_note = Math.floor(midi_note) + if (midi_note > 127) return 0 + const note = Tone.Frequency(Math.floor(midi_note), "midi").toNote() + // console.log(freq, midi_note, note) + return note +} + +/* play next note according to sonification */ + +function play_next(){ + if (paused) return + let note_time = 120000 / Tone.Transport.bpm.value * note_values[nx.timing.active][0] + clearTimeout(playTimeout) + playTimeout = setTimeout(play_next, note_time) + let [new_i, notes, timings] = play_fn(i, bounds, diff, note_time) + if (dataset.labels) { + // const j = Math.floor(i / bounds.rows[0].length) + // document.querySelector('#cases').innerHTML = dataset.labels[j] + } + + i = new_i + if (recording) { + let timing = note_values[nx.timing.active][2] + if (timing.length) timing = timing[i % timing.length] + recorder.addEvent(new MidiWriter.NoteEvent({ pitch: notes, duration: 't' + timing })) + } +} + +function play() { + i = 0 + mass_i = 0 + paused = false + play_next() +} +function pause() { + paused = true +} + +/* bind selects */ + +function pick_dataset(key){ + console.log('pick dataset:', key, datasets[key]) + i = 0 + mass_i = 0 + window.location.hash = key + dataset = datasets[key] + bounds = get_bounds(dataset) + diff = get_diff_bounds(bounds.rows) + play_fn = dataset.play_fn + max_i = dataset.max_i || (bounds.rows.length * bounds.rows[0].length) + if (dataset.onPick) { + dataset.onPick() + } +} + +/* build and bind the UI */ + +function ready() { + scales.build_options(document.querySelector('#scale')) + build_options(document.querySelector('#dataset'), datasets, pick_dataset) + + const dial_size = [50, 50] + + Tone.Transport.bpm.value = DEFAULT_BPM + nx.tempo = new Nexus.Dial('#tempo', { + size: dial_size, + min: 10, + max: 300, + step: 1, + value: DEFAULT_BPM, + }) + update_value_on_change(nx.tempo, '#tempo', true, v => Tone.Transport.bpm.value = v) + + nx.timing = new Nexus.RadioButton('#timing', { + size: [400,25], + numberOfButtons: note_values.length, + active: 6, + }) + update_radio_value_on_change(nx.timing, '#timing', note_values) + + nx.duration = new Nexus.Dial('#duration', { + size: dial_size, + min: 0, + max: 2, + step: 0.01, + value: 0.8, + }) + update_value_on_change(nx.duration, '#duration', false) + + nx.offset = new Nexus.Dial('#offset', { + size: dial_size, + min: -24, + max: 24, + step: 1, + value: -5, + }) + update_value_on_change(nx.offset, '#offset', true) + + nx.octave = new Nexus.Dial('#octave', { + size: dial_size, + min: -4, + max: 4, + step: 1, + value: 0, + }) + update_value_on_change(nx.octave, '#octave', true) + + nx.multiply = new Nexus.Dial('#multiply', { + size: dial_size, + min: -64, + max: 64, + step: 1, + value: 19, + }) + 
update_value_on_change(nx.multiply, '#multiply', true) + + nx.interval = new Nexus.Dial('#interval', { + size: dial_size, + min: -64, + max: 64, + step: 1, + value: 10, + }) + update_value_on_change(nx.interval, '#interval', true) + + nx.pedal_tone = new Nexus.Dial('#pedal_tone', { + size: dial_size, + min: -24, + max: 24, + step: 1, + value: -7, + }) + update_value_on_change(nx.pedal_tone, '#pedal_tone', true) + + const play_button = document.querySelector('#play') + play_button.addEventListener('click', () => { + play() + }) + + const pause_button = document.querySelector('#pause') + pause_button.addEventListener('click', () => { + pause() + }) + + const export_midi_button = document.querySelector('#export_midi') + export_midi_button.addEventListener('click', () => { + export_pattern_as_midi(dataset, bounds, diff, nx.tempo.value, nx.timing.active, play_fn, max_i) + }) + + const record_midi_button = document.querySelector('#record_midi') + record_midi_button.addEventListener('click', () => { + if (recording) { + record_midi_button.innerHTML = 'Record MIDI' + document.body.classList.remove('recording') + recording = false + const writer = new MidiWriter.Writer([recorder]) + const blob = dataURItoBlob(writer.dataUri()) + saveAs(blob, 'Recording - ' + dataset.name + '.mid') + } else { + record_midi_button.innerHTML = 'Save Recording' + document.body.classList.add('recording') + recording = true + recorder = new MidiWriter.Track() + recorder.setTempo(nx.tempo.value) + } + }) + + document.querySelector('.loading').classList.remove('loading') + + document.querySelector('#scale').value = '14' + scales.pick(14) + + const initial_dataset = decodeURIComponent(window.location.hash.substr(1)) || 'Mass Shootings' + document.querySelector('#dataset').value = initial_dataset + pick_dataset(initial_dataset) + + // play_next() +} + +/* keys */ + +keys.listen(index => { + nx.offset.value = index + nx.offset.update(index) +}) diff --git a/client/player.js b/client/player.js new file mode 100644 index 0000000..0d7cadc --- /dev/null +++ b/client/player.js @@ -0,0 +1,165 @@ + +var is_mobile = /Android|webOS|iPhone|iPad|iPod|BlackBerry/i.test(navigator.userAgent) +var links = document.querySelectorAll("a") +var audio, music = [], current_index = -1, typing = false +var active = false +// var comment = document.querySelector("#comment") +Array.prototype.slice.apply(links).forEach(function(url){ + if (url.href.match(/\.(mp3|wav|ogg)/i)) { + var index = music.length + if (is_mobile) url.href = url.href.replace(/^https/,"http") + music.push(url) + url.addEventListener("click", function(e){ + if (e.ctrlKey || e.altKey || e.metaKey || e.shiftKey) return + e.preventDefault() + play(index) + }) + } + else if (url.href.match(/(gif|jpe?g|png|bmp)/i)) { + // url.innerHTML = "<img src='" + url.href + "'>" + // url.addEventListener("click", function(e){ + // if (e.ctrlKey || e.altKey || e.metaKey || e.shiftKey) return + // e.preventDefault() + // url.classList.toggle("zoomed") + // }) + } +}) +const el = document.querySelector('.player') +const title_el = el.querySelector('.title') +const icon_el = el.querySelector('.icon') +const pos_el = el.querySelector('.pos') +const track_el = el.querySelector('.track') +const dot_el = el.querySelector('.dot') +const time_el = el.querySelector('.time') +const track_width = track_el.offsetWidth +const track_left = pos_el.offsetLeft +if (music.length) { + audio = document.createElement("audio") + window.a = audio + audio.setAttribute("controls", true) + audio.addEventListener("ended", 
next) + audio.addEventListener("timeupdate", timeupdate) + // audio.src = music[0].href + // var player = document.querySelector("table") + // player.parentNode.insertBefore(audio, player) + document.body.addEventListener("keydown", keydown) + // comment.addEventListener("focus", focusTextBox) + // comment.addEventListener("blur", blurTextBox) +} +icon_el.addEventListener('click', toggle) + +if (is_mobile) { + pos_el.addEventListener('touchstart', e => mousedown(e.touches[0])) + pos_el.addEventListener('touchmove', e => mousemove(e.touches[0])) + window.addEventListener('touchup', mouseup) +} else { + pos_el.addEventListener('mousedown', mousedown) + pos_el.addEventListener('mousemove', mousemove) + window.addEventListener('mouseup', mouseup) +} +var down = false +var mousex = 0 +function mousedown(e) { + e.preventDefault && e.preventDefault() + down = true + mousex = (e.pageX - track_left) / track_width +} +function mousemove(e) { + if (!down) return + mousex = Math.min(Math.max(0, (e.pageX - track_left) / track_width), 1) + dot_el.style.transform = 'translateX(' + (mousex * track_width) + 'px)' +} +function mouseup(e) { + if (!down) return + down = false + var t = mousex * audio.duration + audio.currentTime = Math.round(t) + dot_el.style.transform = 'translateX(' + (mousex * track_width) + 'px)' +} + +function play(index){ + if (!active) { + active = true + el.classList.add('active') + } + if (index === music.length) return stop(); + if (!icon_el.classList.contains('active')) { + icon_el.classList.add('active') + } + current_index = (index + music.length) % music.length + const a_el = music[current_index] + audio.src = a_el.href + audio.play() + + const header = prevUntil(a_el.parentNode, 'h3') + const title = (index + 1) + '. ' + header.innerText + ': ' + a_el.innerText + title_el.innerHTML = title + var playing = document.querySelector(".playing") + if (playing) playing.classList.remove("playing") + music[current_index].classList.add("playing") +} +function prev(){ + play(current_index - 1) +} +function next(){ + play(current_index + 1) +} +function pause() { + icon_el.classList.remove('active') +} +function stop() { + el.classList.remove('active') + var playing = document.querySelector(".playing") + if (playing) playing.classList.remove("playing") + active = false +} +function toggle(){ + if (audio.paused) { + icon_el.classList.add('active') + audio.play() + } else { + pause() + audio.pause() + } +} +function timeupdate() { + if (down) return + let { currentTime, duration } = audio + let t = currentTime / duration + dot_el.style.transform = 'translateX(' + (track_width * t) + 'px)' + time_el.innerHTML = time(currentTime) + ' / ' + time(duration) +} +function time(n) { + if (!n) return '0:00' + n = Math.floor(n) + let s = n % 60 + if (s < 10) s = '0' + s + let m = Math.floor(n / 60) + return m + ':' + s +} +function keydown(e){ + if (typing || e.ctrlKey || e.altKey || e.metaKey || e.shiftKey) return + switch (e.keyCode) { + case 37: // left + prev() + break; + case 39: // right + next() + break; + case 32: // spacebar + e.preventDefault() + toggle() + break; + } +} + +function prevUntil (elem, selector) { + elem = elem.previousElementSibling; + while (elem) { + if (elem.matches(selector)) break; + elem = elem.previousElementSibling; + } + return elem; +} +// function focusTextBox (){ typing = true } +// function blurTextBox (){ typing = false } |
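For reference, the frequency-to-MIDI conversion that `play_note` in `client/lib/midi.js` and `get_midi_note_for_frequency` in `client/old_index.js` both build on is the standard 12-TET mapping from `client/lib/util.js`. A small worked example, using the 466.164 Hz default root from `intonation.js` as the input:

```js
// ftom/mtof as defined in client/lib/util.js
const ftom = f => 69 + 12 * Math.log2(f / 440)
const mtof = m => 440 * Math.pow(2, (m - 69) / 12)

let midi = ftom(466.164)   // ~70.0, since 466.164 Hz is 440 * 2^(1/12)
let cents = midi % 1       // fractional semitone left over
if (cents > 0.5) {         // round to the nearer note, as play_note does
  midi += 1
  cents -= 1
}
midi = Math.floor(midi)    // 70
// play_note doubles `cents` before passing it to sendPitchBend, and uses
// Tone.Frequency(midi, "midi").toNote() to get the note name for MIDI output.
console.log(midi, mtof(midi).toFixed(2)) // 70  466.16
```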
