/* gun violence data sonification: load datasets, map values to notes via Tone.js + NexusUI, and play, record, or export them as MIDI */

import Tone from 'tone'
import Nexus from 'nexusui'
import keys from './lib/keys'
import scales from './lib/scales'
import kalimba from './lib/kalimba'
import {
  midi_init,
  play_note,
  play_sequence,
  export_pattern_as_midi,
  note_values,
  MidiWriter,
} from './lib/midi'
import {
  requestAudioContext,
  norm,
  dataURItoBlob,
  get_bounds,
  get_diff_bounds,
  transpose,
  ftom,
} from './lib/util'
import { update_value_on_change, update_radio_value_on_change, build_options, nx } from './lib/ui'
import * as data from './data'

const DEFAULT_BPM = 60

let recorder = null
let recording = false
let paused = false
let playTimeout

midi_init()

/* initialization */

// map column names in the mass shootings dataset to their column indices
const mass_fields = [
  "date", "timestamp", "fatalities", "injured", "total_victims",
  "age", "case", "weapon_type", "weapon_details"
].reduce((a, b, i) => {
  a[b] = i
  return a
}, {})

let i = 0, max_i = 0, mass_i = 0
let datasets = {}, dataset = {}, bounds = {}, diff = []
let play_fn = play_sequence

data.load().then(lists => {
  console.log(lists)

  const fm = lists.firearms_manufactured
  datasets['Firearms Manufactured'] = {}
  datasets['Firearms Manufactured'].name = 'Firearms Manufactured'
  datasets['Firearms Manufactured'].play_fn = play_sequence
  datasets['Firearms Manufactured'].h = fm.h.slice(1, 5)
  datasets['Firearms Manufactured'].labels = fm.lines.map(l => l.slice(0, 1))
  datasets['Firearms Manufactured'].lines = fm.lines.map(l => l.slice(1, 5))

  // one dataset per column of the monthly gun violence table (skipping the Date column)
  transpose(lists.gun_violence_by_month.lines).forEach((row, i) => {
    const name = lists.gun_violence_by_month.h[i]
    if (name === 'Date') return
    // console.log(name, row)
    datasets[name] = {
      name,
      h: [name],
      lines: [row.map(n => parseInt(n))],
      play_fn: play_sequence,
    }
  })

  datasets["Mass Shootings"] = lists.mass_shootings_from_columbine
  datasets["Mass Shootings"].name = "Mass Shootings"
  datasets["Mass Shootings"].pedal = true
  datasets["Mass Shootings"].play_fn = play_mass_shootings

  const lines = datasets["Mass Shootings"].lines.reverse()
  const [min_y, ...rest_a] = lines[0][mass_fields.date].split('/')
  const [max_y, ...rest_b] = lines[lines.length - 1][mass_fields.date].split('/')

  // month offset of each case relative to the first one, used to time playback
  datasets["Mass Shootings"].dates = lines.map(row => {
    const [y, m, d] = row[mass_fields.date].split('/')
    return (parseInt(y) - parseInt(min_y)) * 12 + parseInt(m)
  })
  datasets["Mass Shootings"].max_i = (parseInt(max_y) - parseInt(min_y)) * 12 + parseInt(12)
  // console.log('max i', max_i)
  datasets["Mass Shootings"].data = lines
  datasets["Mass Shootings"].lines = [lines.map(row => row[mass_fields.total_victims])]

  requestAudioContext(ready)
})

/* play function for mass shooting data w/ custom timing */

// export const note_values = [
//   [8, '8 measures', 8 * 512],
//   [4, '4 measures', 4 * 512],
//   [2, '2 measures', 2 * 512],
//   [1, 'whole note', 512],
//   [1/2, 'half note', 256],
//   [1/3, 'third note', [170, 170, 171]],
//   [1/4, 'quarter note', 128],
//   [1/5, 'fifth note', [51, 51, 51, 51, 52]],
//   [1/6, 'sixth note', [85, 85, 86, 85, 85, 86]],
//   [1/8, 'eighth note', 64],
//   [1/10, 'tenth note', [25, 26, 26, 25, 26, 25, 26, 26, 25, 26]],
//   [1/12, 'twelfth note', [21, 21, 22, 21, 21, 22, 21, 21, 22, 21, 21, 22]],
//   [1/16, 'sixteenth note', 32],
//   [1/32, 'thirtysecond note', 16],
// ]

function play_mass_shootings(i, bounds, diff, note_time, channel = "all", exporting) {
  const { rows, min, max } = bounds
  const y = 0
  const x = i % rows[0].length
  const n = rows[y][x]
  const total = dataset.dates.length

  let pedal_note
  let notes = [], midi_notes = [], cases = [], timings

  // collect every case that falls in the month represented by step i
  // console.log(i, mass_i, dataset.dates[mass_i], channel, exporting)
  while (i >= dataset.dates[mass_i] && mass_i < total) {
    notes.push(dataset.lines[0][mass_i])
    cases.push(dataset.data[mass_i][mass_fields.date] + ' ' +
      dataset.data[mass_i][mass_fields.case] + ", " +
      dataset.data[mass_i][mass_fields.fatalities] + ' dead, ' +
      dataset.data[mass_i][mass_fields.injured] + ' injured')
    // console.log('push case', dataset.data[mass_i][mass_fields.date] + ' ' + dataset.data[mass_i][mass_fields.case])
    mass_i += 1
  }

  // split the step (128 ticks) among up to four simultaneous cases
  switch (notes.length) {
    default:
    case 0:
      break
    case 1:
      midi_notes.push(play_note(norm(notes[0], min, max) * nx.multiply.value, 128, channel, exporting, 0))
      timings = [128]
      break
    case 2:
      midi_notes.push(play_note(norm(notes[0], min, max) * nx.multiply.value, 64, channel, exporting, 0))
      midi_notes.push(play_note(norm(notes[1], min, max) * nx.multiply.value, 64, channel, exporting, 64))
      timings = [64, 64]
      break
    case 3:
      midi_notes.push(play_note(norm(notes[0], min, max) * nx.multiply.value, 43, channel, exporting, 0))
      midi_notes.push(play_note(norm(notes[1], min, max) * nx.multiply.value, 43, channel, exporting, 43))
      midi_notes.push(play_note(norm(notes[2], min, max) * nx.multiply.value, 42, channel, exporting, 85))
      timings = [43, 43, 42]
      break
    case 4:
      midi_notes.push(play_note(norm(notes[0], min, max) * nx.multiply.value, 32, channel, exporting, 0))
      midi_notes.push(play_note(norm(notes[1], min, max) * nx.multiply.value, 32, channel, exporting, 32))
      midi_notes.push(play_note(norm(notes[2], min, max) * nx.multiply.value, 32, channel, exporting, 64))
      midi_notes.push(play_note(norm(notes[3], min, max) * nx.multiply.value, 32, channel, exporting, 96))
      timings = [32, 32, 32, 32]
      break
  }

  // sustain a pedal tone under every step
  const pedal_freq = scales.current().index(nx.pedal_tone.value, nx.octave.value)
  pedal_note = get_midi_note_for_frequency(pedal_freq)

  if (!exporting) {
    kalimba.play(pedal_freq, -12)
    if (cases.length) {
      // list the cases sounding on this step
      document.querySelector('#cases').innerHTML = cases.join('<br/>')
    }
  }

  if (total <= mass_i) {
    // wrap around once every case has been played
    mass_i = 0
    i = 0
  } else {
    i += 1
  }

  if (notes.length) {
    return [i, midi_notes, timings, pedal_note]
  }
  return [i, [], [], pedal_note]
}

// convert a frequency to the name of the nearest MIDI note, or 0 if it is out of range
function get_midi_note_for_frequency(freq) {
  let midi_note = ftom(freq)
  let cents = midi_note % 1
  if (cents > 0.5) {
    midi_note += 1
    cents -= 1
  }
  cents *= 2
  midi_note = Math.floor(midi_note)
  if (midi_note > 127) return 0
  const note = Tone.Frequency(Math.floor(midi_note), "midi").toNote()
  console.log(freq, midi_note, note)
  return note
}

/* play next note according to sonification */

function play_next() {
  if (paused) return

  let note_time = 120000 / Tone.Transport.bpm.value * note_values[nx.timing.active][0]

  // schedule the following step before playing this one
  clearTimeout(playTimeout)
  playTimeout = setTimeout(play_next, note_time)

  let [new_i, notes, timings] = play_fn(i, bounds, diff, note_time)

  if (dataset.labels) {
    const j = Math.floor(i / bounds.rows[0].length)
    document.querySelector('#cases').innerHTML = dataset.labels[j]
  }
  i = new_i

  if (recording) {
    // append the notes just played to the MIDI recording
    let timing = note_values[nx.timing.active][2]
    if (timing.length) timing = timing[i % timing.length]
    recorder.addEvent(new MidiWriter.NoteEvent({ pitch: notes, duration: 't' + timing }))
  }
}

function play() {
  i = 0
  mass_i = 0
  paused = false
  play_next()
}

function pause() {
  paused = true
}

/* bind selects */

function pick_dataset(key) {
  console.log('pick dataset:', key, datasets[key])
  i = 0
  mass_i = 0
  dataset = datasets[key]
  bounds = get_bounds(dataset)
  diff = get_diff_bounds(bounds.rows)
  play_fn = dataset.play_fn
  max_i = dataset.max_i || (bounds.rows.length * bounds.rows[0].length)
}

/* build and bind the UI */

function ready() {
  scales.build_options(document.querySelector('#scale'))
  build_options(document.querySelector('#dataset'), datasets, pick_dataset)

  const dial_size = [50, 50]

  Tone.Transport.bpm.value = DEFAULT_BPM
  nx.tempo = new Nexus.Dial('#tempo', {
    size: dial_size,
    min: 10,
    max: 300,
    step: 1,
    value: DEFAULT_BPM,
  })
  update_value_on_change(nx.tempo, '#tempo', true, v => Tone.Transport.bpm.value = v)

  nx.timing = new Nexus.RadioButton('#timing', {
    size: [400, 25],
    numberOfButtons: note_values.length,
    active: 6,
  })
  update_radio_value_on_change(nx.timing, '#timing', note_values)

  nx.duration = new Nexus.Dial('#duration', {
    size: dial_size,
    min: 0,
    max: 2,
    step: 0.01,
    value: 0.8,
  })
  update_value_on_change(nx.duration, '#duration', false)

  nx.offset = new Nexus.Dial('#offset', {
    size: dial_size,
    min: -24,
    max: 24,
    step: 1,
    value: -5,
  })
  update_value_on_change(nx.offset, '#offset', true)

  nx.octave = new Nexus.Dial('#octave', {
    size: dial_size,
    min: -4,
    max: 4,
    step: 1,
    value: 0,
  })
  update_value_on_change(nx.octave, '#octave', true)

  nx.multiply = new Nexus.Dial('#multiply', {
    size: dial_size,
    min: -64,
    max: 64,
    step: 1,
    value: 19,
  })
  update_value_on_change(nx.multiply, '#multiply', true)

  nx.interval = new Nexus.Dial('#interval', {
    size: dial_size,
    min: -64,
    max: 64,
    step: 1,
    value: 10,
  })
  update_value_on_change(nx.interval, '#interval', true)

  nx.pedal_tone = new Nexus.Dial('#pedal_tone', {
    size: dial_size,
    min: -24,
    max: 24,
    step: 1,
    value: -7,
  })
  update_value_on_change(nx.pedal_tone, '#pedal_tone', true)

  const play_button = document.querySelector('#play')
  play_button.addEventListener('click', () => {
    play()
  })

  const pause_button = document.querySelector('#pause')
  pause_button.addEventListener('click', () => {
    pause()
  })

  const export_midi_button = document.querySelector('#export_midi')
  export_midi_button.addEventListener('click', () => {
    export_pattern_as_midi(dataset, bounds, diff, nx.tempo.value, nx.timing.active, play_fn, max_i)
  })
  const record_midi_button = document.querySelector('#record_midi')
  record_midi_button.addEventListener('click', () => {
    if (recording) {
      // stop recording and download the captured track as a .mid file
      record_midi_button.innerHTML = 'Record MIDI'
      document.body.classList.remove('recording')
      recording = false
      const writer = new MidiWriter.Writer([recorder])
      const blob = dataURItoBlob(writer.dataUri())
      // saveAs is assumed to be available globally (e.g. from FileSaver.js)
      saveAs(blob, 'Recording - ' + dataset.name + '.mid')
    } else {
      // start a fresh MIDI track at the current tempo
      record_midi_button.innerHTML = 'Save Recording'
      document.body.classList.add('recording')
      recording = true
      recorder = new MidiWriter.Track()
      recorder.setTempo(nx.tempo.value)
    }
  })

  document.querySelector('.loading').classList.remove('loading')

  // default to the Mass Shootings dataset and scale 14 on load
  document.querySelector('#dataset').value = 'Mass Shootings'
  pick_dataset('Mass Shootings')
  document.querySelector('#scale').value = '14'
  scales.pick(14)

  // play_next()
}

/* keys */

keys.listen(index => {
  nx.offset.value = index
  nx.offset.update(index)
})