| author | Jules Laplace <julescarbon@gmail.com> | 2018-06-05 23:56:12 +0200 |
|---|---|---|
| committer | Jules Laplace <julescarbon@gmail.com> | 2018-06-05 23:56:12 +0200 |
| commit | 8c8e2e08d2ae89ba18ca05bab446e4642798dce2 (patch) | |
| tree | d3153ba1fc32faa19d4094b2b5a18943f1e45dfa /app | |
| parent | c4dd63e487588cfa8e104a840af75854972b5ddb (diff) | |
pulling in wav2pix code
Diffstat (limited to 'app')
| -rw-r--r-- | app/client/audio/draw.js | 270 |
| -rw-r--r-- | app/client/audio/index.js | 2 |
| -rw-r--r-- | app/client/audio/output.js | 8 |
| -rw-r--r-- | app/client/audio/spectrum.js | 278 |
| -rw-r--r-- | app/client/audio/startAudioContext.js | 179 |
| -rw-r--r-- | app/client/audio/wav2pix.js | 153 |
| -rw-r--r-- | app/client/util/format.js | 132 |
| -rw-r--r-- | app/client/util/index.js | 146 |
| -rw-r--r-- | app/client/util/math.js | 52 |
9 files changed, 1079 insertions, 141 deletions
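The bulk of the new code is an STFT round trip: `spectrum.js` turns PCM into overlapping 512-point Hamming-windowed FFT frames and back (overlap-add), `draw.js` rasterises a spectrum onto a canvas, and `wav2pix.js` slices a dropped audio file into fixed-length frames, renders each frame's raw spectrum as a 256×256 image, and zips the PNGs. Below is a minimal sketch of how the exported modules fit together, using only functions defined in the diff that follows; the import paths are illustrative (the commit itself imports from `./draw` and `./lib/spectrum`).

```js
// Sketch only: analyse a mono PCM Float32Array, rasterise its spectrum,
// then rebuild audio from the pixels, mirroring the path wav2pix.js takes.
import draw from './draw'
import spectrum from './spectrum'

function roundTrip (pcm, sr) {
  // forward STFT: 512-sample frames, 128-sample hop, Hamming window
  const spec = spectrum.toSpectrum(pcm, sr)

  // scaled real part -> red channel, scaled imaginary part -> green channel
  const { canvas, imageData } = draw.raw_spectrum(spec, 0, 0, 256, 256, 8, 8)

  // read the pixels back into complex bins and resynthesise by overlap-add
  const recovered = spectrum.fromImageData(imageData, sr, 8, 8)
  return { canvas, audioBuffer: spectrum.fromSpectrum(recovered) }
}
```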
diff --git a/app/client/audio/draw.js b/app/client/audio/draw.js
new file mode 100644
index 0000000..8caf8d8
--- /dev/null
+++ b/app/client/audio/draw.js
@@ -0,0 +1,270 @@
+import {
+  browser, requestAudioContext,
+  randint, randrange, clamp, mod,
+} from './lib/util'
+
+import './lib/vendor/hidpi-canvas'
+
+import mouse from './lib/mouse'
+import color from './lib/color'
+
+let w, h
+let rx, ry
+
+const pixels_per_second = 512 // 1024
+
+const canvas = document.createElement('canvas')
+// document.body.appendChild(canvas)
+// document.body.addEventListener('resize', resize)
+resize()
+recenter()
+requestAnimationFrame(animate)
+
+// must request context after resizing
+const ctx = canvas.getContext('2d')
+
+const scratch = document.createElement('canvas')
+const scratchCtx = scratch.getContext('2d-lodpi')
+
+function resize(ww, hh){
+  w = canvas.width = ww || window.innerWidth
+  h = canvas.height = hh || window.innerHeight
+  canvas.style.width = w + 'px'
+  canvas.style.height = h + 'px'
+}
+function recenter(){
+  rx = randint(w), ry = randint(h)
+}
+let frame = null
+function onFrame(fn){
+  frame = fn
+}
+function animate(t){
+  requestAnimationFrame(animate)
+  if (frame) {
+    frame(t)
+    frame = null
+  }
+  // ctx.save()
+  // ctx.globalAlpha = 0.0001
+  // ctx.translate(w/2, h/2)
+  // ctx.rotate(0.1)
+  // ctx.translate(-rx, -ry)
+  // ctx.drawImage(canvas, 0, 0)
+  // ctx.restore()
+}
+function clear(n, x, y, ww, hh){
+  ctx.fillStyle = 'rgba(255,255,255,' + (n || 0.9) + ')'
+  ctx.fillRect(x || 0, y || 0, ww || w, hh || h)
+  recenter()
+}
+function triangle(px,py,r){
+  setTimeout( () => tri(px,py,r), Math.random()*10)
+  // setTimeout( () => tri(px,py,r), Math.random()*200)
+  // setTimeout( () => tri(px,py,r), Math.random()*300)
+}
+function tri(px, py, r) {
+  ctx.save()
+  ctx.globalCompositeOperation = 'multiply'
+  ctx.fillStyle = color.color((px+py)/(w+h), 0, 1, 0.2)
+  function p(){
+    let theta = randrange(0, Math.PI*2)
+    let x = px + Math.cos(theta) * r
+    let y = py + Math.sin(theta) * r
+    return { x, y }
+  }
+  ctx.beginPath()
+  const p0 = p(), p1 = p(), p2 = p()
+  ctx.moveTo(p0.x, p0.y)
+  ctx.lineTo(p1.x, p1.y)
+  ctx.lineTo(p2.x, p2.y)
+  ctx.lineTo(p0.x, p0.y)
+  ctx.fill()
+  ctx.restore()
+}
+function line(y){
+  ctx.beginPath()
+  ctx.moveTo(0, y)
+  ctx.lineTo(w, y)
+  ctx.strokeStyle = "#888"
+  ctx.strokeWidth = 1
+  ctx.stroke()
+}
+function dot(x, y, r){
+  ctx.fillStyle = "#f00"
+  ctx.beginPath()
+  ctx.moveTo(x, y)
+  ctx.arc(x, y, r, 0, 2*Math.PI)
+  ctx.fill()
+}
+function waveform(pcm, sr, pos, zoom){
+  sr = sr || 44100
+  pos = pos || 0
+
+  var width = w
+  var height = Math.floor(h/4)
+  var half_height = Math.floor(height/2)
+  var x0 = 0
+  var y0 = 20
+  var ymid = y0 + half_height
+  var max_width_in_seconds = width / pixels_per_second
+  var max_width_in_samples = max_width_in_seconds * sr
+  var pcm_length = pcm.length
+  var len = Math.min(pcm_length, max_width_in_samples)
+  var pcm_step = sr / pixels_per_second
+  var i
+  ctx.save()
+
+  clear(1, x0, y0, width, height)
+
+  line(ymid)
+  ctx.beginPath()
+  for (i = 0; i < width; i += 0.5) {
+    var si = Math.floor(pcm_step * i + pos)
+    if (si > pcm_length) break
+    var val = pcm[si] // -1, 1
+    // ctx.moveTo(x0 + i, ymid)
+    ctx.lineTo(x0 + i, ymid + val * half_height)
+  }
+  ctx.strokeStyle = "rgba(250,20,0,0.9)"
+  ctx.strokeWidth = 1
+  ctx.stroke()
+  ctx.restore()
+}
+
+function spectrum(spec, x0, y0, ww, hh){
+  const data = spec.data
+  const fft_size = spec.fft_size
+  const half_fft_size = spec.fft_size / 2
+  const spec_len = data.length
+
+  scratch.width = data.length
+  scratch.height = half_fft_size
+
+  var imageData = ctx.createImageData(scratch.width, scratch.height)
+  var pixels = imageData.data
+
+  let i, j, u, v, _r, _i, col, hsl
+
+  for (i = 0; i < spec_len; i++) {
+    col = data[i]
+
+    for (j = 0; j < half_fft_size; j++) {
+      u = ((half_fft_size - j) * spec_len + i) * 4
+      v = j * 2
+      _r = col[v]
+      _i = mod(col[v+1], Math.PI*2) / (Math.PI*2)
+      hsl = color.hsl2rgb((_i + 1) / 2, 1.0, 1 - Math.abs(_r / 10))
+      // red - real part
+      // pixels[u] = _r * 127 + 127
+      // // green - imag part
+      // pixels[u+1] = _i * 127 + 127
+      // // blue - magnitude
+      // pixels[u+2] = Math.sqrt(Math.pow(_r, 2) + Math.pow(_i, 2)) * 128 + 127
+      // pixels[u+3] = 255
+      pixels[u] = hsl[0]
+      pixels[u+1] = hsl[1]
+      pixels[u+2] = hsl[2]
+      pixels[u+3] = 255
+    }
+  }
+
+  scratchCtx.putImageData(imageData, 0, 0)
+
+  var pcm_length = spec.fft_overlap * spec_len
+
+  x0 = x0 * devicePixelRatio || 0
+  y0 = y0 * devicePixelRatio || Math.floor(h/4)
+  ww = ww * devicePixelRatio || w
+  hh = hh * devicePixelRatio || h/4
+
+  const width = Math.round(pcm_length / spec.sr * pixels_per_second)
+  const height = Math.floor(hh)
+
+  ctx.save()
+  clear(1, x0, y0, w, height)
+  ctx.drawImage(scratch, x0, y0, width, height)
+  ctx.restore()
+}
+function raw_spectrum(spec, x0, y0, ww, hh, def_min_r, def_min_i){
+  const data = spec.data
+  const fft_size = spec.fft_size
+  const half_fft_size = spec.fft_size / 2
+  const spec_len = data.length
+
+  const _scratch = document.createElement('canvas')
+  const _scratchCtx = _scratch.getContext('2d-lodpi')
+  _scratch.width = data.length
+  _scratch.height = half_fft_size
+  // console.log("spectrum w/h:", _scratch.width, _scratch.height)
+
+  var imageData = _scratchCtx.createImageData(_scratch.width, _scratch.height)
+  var pixels = imageData.data
+
+  let i, j, u, v, _r, _i, col, hsl
+  // let min_r = Infinity, max_r = -Infinity
+  // let min_i = Infinity, max_i = -Infinity
+
+  // determined empirically..
+  // let min_r = -60.4894057005308
+  // let max_r = 107.23800966675353
+  // let min_i = -59.4894057005308
+  // let max_i = 108.23800966675353
+  let min_r = -def_min_r
+  let max_r = def_min_r
+  let min_i = -def_min_i
+  let max_i = def_min_i
+  let delta_r = max_r - min_r
+  let delta_i = max_i - min_i
+  let mean_r = 0
+  let mean_i = 0
+  let sum_mean_r = 0, sum_mean_i = 0
+  let real, imag
+
+  for (i = 0; i < spec_len; i++) {
+    col = data[i]
+    mean_r = 0
+    mean_i = 0
+
+    for (j = 0; j < half_fft_size; j++) {
+      u = (j * spec_len + i) * 4
+      v = j * 2
+      real = col[v]
+      imag = col[v+1]
+      mean_r += real
+      mean_i += imag
+      _r = clamp((real - min_r) / delta_r * 255, 0, 255)
+      _i = clamp((imag - min_i) / delta_i * 255, 0, 255)
+
+      // hsl = color.hsl2rgb((_i + 1) / 2, 1.0, 1 - Math.abs(_r / 10))
+      pixels[u+0] = _r
+      pixels[u+1] = _i
+      pixels[u+2] = 127 // hsl[2]
+      pixels[u+3] = 255
+
+      // min_r = Math.min(min_r, col[v])
+      // max_r = Math.max(max_r, col[v])
+      // min_i = Math.min(min_i, col[v]+1)
+      // max_i = Math.max(max_i, col[v]+1)
+    }
+    mean_r /= half_fft_size
+    mean_i /= half_fft_size
+    sum_mean_r += mean_r
+    sum_mean_i += mean_i
+  }
+
+  sum_mean_r /= spec_len
+  sum_mean_i /= spec_len
+  // console.log(sum_mean_r, sum_mean_i)
+  // console.log("r:", min_r, max_r)
+  // console.log("i:", min_i, max_i)
+  _scratchCtx.putImageData(imageData, 0, 0)
+
+  return { canvas: _scratch, imageData }
+}
+
+export default {
+  canvas, ctx, onFrame, resize,
+  triangle, clear, line, dot,
+  waveform, spectrum, raw_spectrum,
+}
\ No newline at end of file
diff --git a/app/client/audio/index.js b/app/client/audio/index.js
new file mode 100644
index 0000000..8b3da74
--- /dev/null
+++ b/app/client/audio/index.js
@@ -0,0 +1,2 @@
+import Tone from 'tone'
+import StartAudioContext from './startAudioContext'
diff --git a/app/client/audio/output.js b/app/client/audio/output.js
new file mode 100644
index 0000000..53901b3
--- /dev/null
+++ b/app/client/audio/output.js
@@ -0,0 +1,8 @@
+import Tone from 'tone'
+
+// const compressor = new Tone.Compressor(-30, 3).toMaster()
+
+const compressor = new Tone.Compressor(-30, 3).toMaster()
+const gain = new Tone.Gain(1).connect(compressor)
+
+export default gain
diff --git a/app/client/audio/spectrum.js b/app/client/audio/spectrum.js
new file mode 100644
index 0000000..f4a5444
--- /dev/null
+++ b/app/client/audio/spectrum.js
@@ -0,0 +1,278 @@
+import Tone from 'tone'
+
+import { shuffle, quantize, mod } from '../util'
+
+import { windows as signalWindows } from 'signal-windows'
+import FFTJS from 'fft.js'
+
+const fft_size = 512
+const fft_overlap = fft_size / 4
+
+const fft = new FFTJS(fft_size)
+
+function toSpectrum(pcm, sr){
+  sr = sr || 44100
+  const ham = signalWindows.construct('ham', fft_size)
+  const pcm_in = new Array(fft_size)
+  const pcm_length = pcm.length
+  const pcm_q_length = Math.ceil(pcm_length / fft_size) * fft_size
+  let i, j, fft_out, data = [];
+  for (i = -fft_size; i < pcm_q_length; i += fft_overlap) {
+    for (j = 0; j < fft_size; j++) {
+      pcm_in[j] = pcm[i+j] * ham[j] || 0
+    }
+    fft_out = fft.createComplexArray()
+    fft.realTransform(fft_out, pcm_in)
+    fft.completeSpectrum(fft_out)
+    data.push(fft_out)
+  }
+  return {
+    data,
+    sr,
+    fft_size,
+    fft_overlap,
+  }
+}
+
+function fromSpectrum(spec){
+  const data = spec.data
+  const sr = spec.sr
+  const fft_size = spec.fft_size
+  const fft_overlap = spec.fft_overlap
+  const spec_len = data.length
+
+  const ham = signalWindows.construct('ham', fft_size)
+  const out = fft.createComplexArray()
+  const pcm_length = fft_overlap * spec_len
+
+  const audioBuffer = Tone.context.createBuffer(1, pcm_length, sr)
+  const pcm = audioBuffer.getChannelData(0);
+
+  let i, j, u, col
+
+  for (i = 0; i < spec_len; i++) {
+    col = data[i]
+    // for (j = fft_size; j < fft_size << 1; j++) {
+    //   col[j] = 0
+    // }
+    // if (i == 0) console.log(col)
+    fft.inverseTransform(out, col)
+    u = i * (fft_overlap)
+    for (j = 0; j < fft_size; j++) {
+      pcm[u+j] += out[j*2] * ham[j] || 0
+    }
+  }
+
+  fadeInOut(pcm, fft_size)
+  // console.log(pcm)
+  return audioBuffer
+}
+
+function fromImageData(imageData, sr, _r, _i) {
+  const pixels = imageData.data
+  const w = imageData.width
+  const h = imageData.height
+  let data = new Array(w)
+  let x, y, u, v, v2
+  for (y = 0; y < h; y++) {
+    let col = data[y] = new Float32Array(h * 4)
+    for (x = 0; x < w; x++) {
+      u = (x * (w) + y) * 4
+      v = x * 2
+      col[v] = (pixels[u] / 255 - 0.5) * _r
+      col[v+1] = (pixels[u+1] / 255 - 0.5) * _i
+      v2 = (h-y + h) * 2
+      col[v2] = col[v]
+      col[v2+1] = 0 // col[v+1]
+    }
+    col[h*2] = col[h*2+1] = col[h*2-1] = col[h*2-2] = 0
+  }
+  const spec = {
+    data,
+    sr,
+    fft_size, fft_overlap
+  }
+  return spec
+}
+
+function binToHz(spec, i){
+  return (i / spec.fft_size) * spec.sr
+}
+
+function fadeInOut(pcm, fade_size){
+  const pcm_length = pcm.length
+  let fade = 0, i
+  for (i = 0; i < fade_size; i++) {
+    fade = i / (fade_size)
+    fade *= fade
+    pcm[i] *= fade
+    pcm[pcm_length - i] *= fade
+  }
+}
+function rotatePhase(spec, theta){
+  let { data, fft_size } = spec
+  let i, j, col, len = data.length
+  for (i = 0; i < len; i++) {
+    col = data[i]
+    for (j = 0; j < fft_size; j++) {
+      col[j*2+1] += theta
+    }
+  }
+  return spec
+}
+
+function linearBins(spec, n){
+  n = n || 1
+
+  let bins = [], i, q_i
+  for (q_i = 0; q_i < n; q_i++) {
+    bins[q_i] = []
+  }
+  const step = Math.floor(spec.fft_size / n)
+  const len_quantize_n = quantize(spec.fft_size, n)
+  for (i = 0; i < len_quantize_n; i++) {
+    q_i = Math.floor(i/step)
+    bins[q_i] = bins[q_i] || []
+    bins[q_i].push(i)
+  }
+  // leftover bins get put at end
+  for (; i < spec.fft_size; i++) {
+    bins[q_i].push(i)
+  }
+  return bins
+}
+function logarithmicBins(spec){
+  let bins = [], i, j, q_i
+  let binCount = Math.log2(spec.fft_size) - 1
+  for (i = 0, q_i = 0, j = 0; i < binCount; i++) {
+    j += 1 << i
+    bins[i] = []
+    for (; q_i < j; q_i++) {
+      bins[i].push(q_i)
+    }
+  }
+  return bins
+}
+function concatBins(bins){
+  return bins.reduce((acc, cv) => acc.concat(cv), [])
+}
+function reverseBins(bins){
+  return bins.map( bin => bin.reverse() )
+}
+function minBins(bins){
+  return bins.map( bin => {
+    const b = bin[0]
+    return bin.map(() => b)
+  })
+}
+function maxBins(bins){
+  return bins.map( bin => {
+    const b = bin[bin.length-1]
+    return bin.map(() => b)
+  })
+}
+function rotateSpectrum(spec, n){
+  const { fft_size } = spec
+  if (n && n < 1) {
+    n -= 0.5
+    n *= fft_size
+  }
+  n = Math.floor(n)
+  let order = new Array(fft_size), i
+  for (i = 0; i < fft_size; i++) {
+    order[i] = mod(i + n, fft_size/2)
+  }
+  return reorderBins(spec, order)
+}
+function cloneSpectrum(spec){
+  const {
+    data,
+    fft_size,
+    sr, fft_overlap
+  } = spec
+  const spec_len = data.length
+
+  let new_data = new Array(spec_len)
+  let i
+  for (i = 0; i < spec_len; i++) {
+    new_data[i] = data[i].concat()
+    new_data[i][2] = 0
+  }
+
+  return {
+    data: new_data,
+    fft_size,
+    sr, fft_overlap,
+  }
+}
+function reverseSpectrum(spec){
+  let new_spec = cloneSpectrum(spec)
+  new_spec.data = new_spec.data.reverse()
+  return new_spec
+}
+function shuffleSpectrum(spec){
+  const { fft_size } = spec
+  let order = new Array(fft_size), i
+  for (i = 0; i < fft_size; i++) {
+    order[i] = i
+  }
+  shuffle(order)
+  return reorderBins(spec, order)
+}
+function invertSpectrum(spec){
+  const { fft_size } = spec
+  let order = new Array(fft_size), i
+  for (i = 0; i < fft_size; i++) {
+    order[i] = fft_size - i - 1
+  }
+  return reorderBins(spec, order)
+}
+function reorderBins(spec, order){
+  let new_spec = cloneSpectrum(spec)
+  const {
+    data,
+    sr,
+    fft_size,
+    fft_overlap,
+  } = spec
+  const spec_len = data.length
+  const { data: new_data } = new_spec
+
+  let i, j, col, new_col
+  for (j = order.length; j < fft_size; j++) {
+    order[j] = j
+  }
+
+  for (i = 0; i < spec_len; i++) {
+    col = data[i]
+    new_col = new_data[i] = data[i].concat()
+    col[0] = 0
+    col[2] = 0
+    col[4] = 0
+    for (j = 0; j < fft_size/2; j++) {
+      new_col[j*2] = col[order[j]*2]
+      new_col[j*2+1] = col[order[j]*2+1]
+    }
+    for (; j < fft_size; j++) {
+      new_col[j*2] = 0
+      new_col[j*2+1] = 0
+    }
+  }
+
+  return {
+    data: new_data,
+    sr, fft_size, fft_overlap,
+  }
+}
+
+export default {
+  toSpectrum, fromSpectrum, fromImageData, binToHz,
+  fadeInOut,
+  cloneSpectrum,
+  reverseSpectrum, shuffleSpectrum, invertSpectrum, rotateSpectrum,
+  reorderBins,
+  linearBins, logarithmicBins,
+  concatBins,
+  reverseBins, minBins, maxBins,
+  rotatePhase,
+}
diff --git a/app/client/audio/startAudioContext.js b/app/client/audio/startAudioContext.js
new file mode 100644
index 0000000..0e257be
--- /dev/null
+++ b/app/client/audio/startAudioContext.js
@@ -0,0 +1,179 @@
+/**
+ * StartAudioContext.js
+ * @author Yotam Mann
+ * @license http://opensource.org/licenses/MIT MIT License
+ * @copyright 2016 Yotam Mann
+ */
+(function (root, factory) {
+  if (typeof define === "function" && define.amd) {
+    define([], factory);
+  } else if (typeof module === 'object' && module.exports) {
+    module.exports = factory();
+  } else {
+    root.StartAudioContext = factory();
+  }
+}(this, function () {
+
+  /**
+   * The StartAudioContext object
+   */
+  var StartAudioContext = {
+    /**
+     * The audio context passed in by the user
+     * @type {AudioContext}
+     */
+    context : null,
+    /**
+     * The TapListeners bound to the elements
+     * @type {Array}
+     * @private
+     */
+    _tapListeners : [],
+    /**
+     * Callbacks to invoke when the audio context is started
+     * @type {Array}
+     * @private
+     */
+    _onStarted : [],
+  };
+
+
+  /**
+   * Set the context
+   * @param {AudioContext} ctx
+   * @returns {StartAudioContext}
+   */
+  StartAudioContext.setContext = function(ctx){
+    StartAudioContext.context = ctx;
+    return StartAudioContext;
+  };
+
+  /**
+   * Add a tap listener to the audio context
+   * @param {Array|Element|String|jQuery} element
+   * @returns {StartAudioContext}
+   */
+  StartAudioContext.on = function(element){
+    if (Array.isArray(element) || (NodeList && element instanceof NodeList)){
+      for (var i = 0; i < element.length; i++){
+        StartAudioContext.on(element[i]);
+      }
+    } else if (typeof element === "string"){
+      StartAudioContext.on(document.querySelectorAll(element));
+    } else if (element.jquery && typeof element.toArray === "function"){
+      StartAudioContext.on(element.toArray());
+    } else if (Element && element instanceof Element){
+      //if it's an element, create a TapListener
+      var tap = new TapListener(element, onTap);
+      StartAudioContext._tapListeners.push(tap);
+    }
+    return StartAudioContext;
+  };
+
+  /**
+   * Bind a callback to when the audio context is started.
+   * @param {Function} cb
+   * @return {StartAudioContext}
+   */
+  StartAudioContext.onStarted = function(cb){
+    //if it's already started, invoke the callback
+    if (StartAudioContext.isStarted()){
+      cb();
+    } else {
+      StartAudioContext._onStarted.push(cb);
+    }
+    return StartAudioContext;
+  };
+
+  /**
+   * returns true if the context is started
+   * @return {Boolean}
+   */
+  StartAudioContext.isStarted = function(){
+    return (StartAudioContext.context !== null && StartAudioContext.context.state === "running");
+  };
+
+  /**
+   * @class Listens for non-dragging tap ends on the given element
+   * @param {Element} element
+   * @internal
+   */
+  var TapListener = function(element){
+
+    this._dragged = false;
+
+    this._element = element;
+
+    this._bindedMove = this._moved.bind(this);
+    this._bindedEnd = this._ended.bind(this);
+
+    element.addEventListener("touchmove", this._bindedMove);
+    element.addEventListener("touchend", this._bindedEnd);
+    element.addEventListener("mouseup", this._bindedEnd);
+  };
+
+  /**
+   * drag move event
+   */
+  TapListener.prototype._moved = function(e){
+    this._dragged = true;
+  };
+
+  /**
+   * tap ended listener
+   */
+  TapListener.prototype._ended = function(e){
+    if (!this._dragged){
+      onTap();
+    }
+    this._dragged = false;
+  };
+
+  /**
+   * remove all the bound events
+   */
+  TapListener.prototype.dispose = function(){
+    this._element.removeEventListener("touchmove", this._bindedMove);
+    this._element.removeEventListener("touchend", this._bindedEnd);
+    this._element.removeEventListener("mouseup", this._bindedEnd);
+    this._bindedMove = null;
+    this._bindedEnd = null;
+    this._element = null;
+  };
+
+  /**
+   * Invoked the first time of the elements is tapped.
+   * Creates a silent oscillator when a non-dragging touchend
+   * event has been triggered.
+   */
+  function onTap(){
+    //start the audio context with a silent oscillator
+    if (StartAudioContext.context && !StartAudioContext.isStarted()){
+      var osc = StartAudioContext.context.createOscillator();
+      var silent = StartAudioContext.context.createGain();
+      silent.gain.value = 0;
+      osc.connect(silent);
+      silent.connect(StartAudioContext.context.destination);
+      var now = StartAudioContext.context.currentTime;
+      osc.start(now);
+      osc.stop(now+0.5);
+    }
+
+    //dispose all the tap listeners
+    if (StartAudioContext._tapListeners){
+      for (var i = 0; i < StartAudioContext._tapListeners.length; i++){
+        StartAudioContext._tapListeners[i].dispose();
+      }
+      StartAudioContext._tapListeners = null;
+    }
+    //the onstarted callbacks
+    if (StartAudioContext._onStarted){
+      for (var j = 0; j < StartAudioContext._onStarted.length; j++){
+        StartAudioContext._onStarted[j]();
+      }
+      StartAudioContext._onStarted = null;
+    }
+  }
+
+  return StartAudioContext;
+}));
diff --git a/app/client/audio/wav2pix.js b/app/client/audio/wav2pix.js
new file mode 100644
index 0000000..3e86c40
--- /dev/null
+++ b/app/client/audio/wav2pix.js
@@ -0,0 +1,153 @@
+import Tone from 'tone'
+import JSZip from 'jszip'
+import { sprintf } from 'sprintf-js'
+import FileSaver from 'file-saver'
+
+import draw from './draw'
+import keys from './lib/keys'
+import mouse from './lib/mouse'
+import output from './lib/output'
+import spectrum from './lib/spectrum'
+
+import {
+  requestAudioContext,
+  lerp, clamp, mod,
+} from './lib/util'
+
+let selfDrag = false
+let buffer
+let players = []
+let gallery
+let sr = 44100
+let last_i = 0
+let _r = 8, _i = 8
+function init(){
+  requestAudioContext(ready)
+  document.body.addEventListener('dragover', dragover)
+  document.body.addEventListener('dragstart', dragstart)
+  document.body.addEventListener('drop', drop)
+  document.querySelector("#upload").addEventListener('change', handleFileSelect)
+  // draw.onFrame(() => {})
+  draw.resize(256, 256)
+
+  gallery = document.querySelector('#gallery')
+  mouse.register({
+    move: (x, y) => {
+    }
+  })
+  keys.listen((z) => {
+    // console.log(z)
+    play(mod(z, players.length))
+  })
+}
+function ready(){
+}
+function dragover (e) {
+  e.preventDefault()
+}
+function dragstart (e) {
+  selfDrag = true
+}
+function drop (e) {
+  e.stopPropagation()
+  e.preventDefault()
+
+  if (e.dataTransfer && ! selfDrag) {
+    if (e.dataTransfer.files.length) {
+      handleFileSelect(e)
+    }
+  }
+  else {
+    handleFileSelect(e)
+  }
+  selfDrag = false
+}
+let files, file_index = 0;
+function handleFileSelect(e){
+  files = e.dataTransfer ? e.dataTransfer.files : e.target.files
+  loadNext()
+}
+function loadNext(){
+  var file = files[file_index++]
+  if (! file) return
+  load(file)
+}
+function load(file){
+  players = []
+  buffer = new Tone.Buffer(URL.createObjectURL(file), loadBuffer, (err) => console.error('err', err))
+}
+function loadBuffer(){
+  console.log('loaded buffer', buffer)
+  const pcm = buffer._buffer.getChannelData(0)
+  sr = buffer._buffer.sampleRate
+  if (! pcm) return
+
+  const FRAME_LENGTH = 126 * 255
+  const FRAME_OFFSET = FRAME_LENGTH / 4
+
+  var zip = new JSZip()
+
+  var zip_folder = zip.folder("images");
+
+  for (var offset = 0, count = 0, _len = pcm.length - FRAME_LENGTH; offset < _len; offset += FRAME_OFFSET, count += 1) {
+    if ((count % 100) === 0) console.log(count)
+    // console.log('generating', count, offset)
+    // let player = render(pcm.slice(offset, offset+FRAME_LENGTH), count, zip_folder)
+    render(pcm.slice(offset, offset+FRAME_LENGTH), count, zip_folder)
+    // register(player, count)
+    // if (count > 20) break
+  }
+
+  console.log('done exporting')
+  zip.generateAsync({type:"blob"}).then(function(content) {
+    console.log('saving zip')
+    FileSaver.saveAs(content, "img2pix.zip")
+    setTimeout(loadNext, 1000)
+  })
+  // play(0)
+}
+function render(pcm, count, zip){
+  const fft = spectrum.toSpectrum(pcm, sr)
+  // console.log('render', fft)
+  // const pcm_rev = pcm.slice().reverse()
+  // const spec_rev = spectrum.toSpectrum(pcm_rev, spec.sr)
+  draw.clear()
+  const { canvas, imageData } = draw.raw_spectrum(fft, 0, 256, 0, 256, _r, _i)
+  const dataURL = canvas.toDataURL("image/png")
+  if (zip) {
+    const fn = sprintf('frame_%05d.png', count)
+    zip.file(fn, dataURL.split(',')[1], {base64: true})
+  }
+  return { fft, canvas, imageData }
+}
+function play(i) {
+  // console.log('play', i)
+  last_i = i
+  let player = players[clamp(i, 0, players.length)]
+  // const { canvas, imageData } = draw.raw_spectrum(fft, 0, 256, 0, 256, 1, 1)
+  // console.log(_r, _i)
+  // const { canvas, imageData } = draw.raw_spectrum(player.fft, 0, 256, 0, 256, _r, _i)
+  const new_fft = spectrum.fromImageData(player.imageData, 44100, _r, _i)
+  // gallery.innerHTML = ''
+
+  // console.log(player.fft.data, new_fft.data)
+  const buf = spectrum.fromSpectrum(new_fft)
+  const _p = new Tone.Player(buf)
+  _p.connect(output)
+  _p.start(Tone.now())
+  redraw(new_fft)
+}
+function redraw(new_fft){
+  const { canvas, imageData } = draw.raw_spectrum(new_fft, 0, 256, 0, 256, _r, _i)
+}
+function register(player, i){
+  // console.log('register', player)
+  players.push(player)
+  player.canvas.addEventListener('click', () => {
+    play(i)
+  })
+  if (i < 20) {
+    gallery.appendChild(player.canvas)
+  }
+}
+init()
diff --git a/app/client/util/format.js b/app/client/util/format.js
new file mode 100644
index 0000000..e436a3e
--- /dev/null
+++ b/app/client/util/format.js
@@ -0,0 +1,132 @@
+export function timeInSeconds(n){
+  return (n / 10).toFixed(1) + ' s.'
+}
+export function gerund(s){
+  return s.replace(/e?$/, 'ing')
+}
+export function commatize (n, radix) {
+  radix = radix || 1024
+  var nums = [], i, counter = 0, r = Math.floor
+  if (n > radix) {
+    n /= radix
+    nums.unshift(r((n * 10) % 10))
+    nums.unshift(".")
+  }
+  do {
+    i = n % 10
+    n = r(n / 10)
+    if (n && ! (++counter % 3))
+      { i = ' ' + r(i) }
+    nums.unshift(r(i))
+  }
+  while (n)
+  return nums.join("")
+}
+export function carbon_date (date, no_bold) {
+  var span = (+new Date() - new Date(date)) / 1000, color
+  if (! no_bold && span < 86400) // modified today
+    { color = "new" }
+  else if (span < 604800) // modifed this week
+    { color = "recent" }
+  else if (span < 1209600) // modifed 2 weeks ago
+    { color = "med" }
+  else if (span < 3024000) // modifed 5 weeks ago
+    { color = "old" }
+  else if (span < 12315200) // modifed 6 months ago
+    { color = "older" }
+  else
+    { color = "quiet" }
+  return color
+}
+export function hush_views (n, bias, no_bold) {
+  var txt = commatize(n, 1000)
+  bias = bias || 1
+  n = n || 0
+  if (n < 30) { return["quiet", n + " v."] }
+  if (n < 200) { return ["quiet", txt + " v."] }
+  else if (n < 500) { return ["quiet", txt + " v."] }
+  else if (n < 1000) { return ["old", txt + " v."] }
+  else if (n < 5000) { return ["med", txt + " kv."] }
+  else if (no_bold || n < 10000) { return ["recent", txt + " kv."] }
+  else { return ["new", txt + " kv."] }
+}
+export function hush_threads (n, bias, no_bold) {
+  var txt = commatize(n, 1000)
+  bias = bias || 1
+  n = n || 0
+  if (n < 10) { return["quiet", n + " t."] }
+  else if (n < 25) { return ["old", txt + " t."] }
+  else if (n < 50) { return ["med", txt + " t."] }
+  else if (no_bold || n < 100) { return ["recent", txt + " t."] }
+  else { return ["new", txt + " t."] }
+}
+export function hush_size (n, bias, no_bold) {
+  var txt = commatize(Math.round(n / 1024))
+  bias = 1 || bias
+  n = n || 0
+  if (! n) { return ['', ''] }
+  if (n < 1000) {
+    return ["quiet", n + " b."]
+  }
+  if (n < 1000000) {
+    return ["quiet", txt + " kb."]
+  }
+  else if (n < (20000000/bias)) {
+    return ["quiet", txt + " mb."]
+  }
+  else if (n < (50000000/bias)) {
+    return ["old", txt + " mb."]
+  }
+  else if (n < (80000000/bias)) {
+    return ["med", txt + " mb."]
+  }
+  else if (no_bold || n < (170000000/bias)) {
+    return ["recent", txt + " mb."]
+  }
+  else {
+    return ["new", txt + " mb."]
+  }
+}
+export function hush_null (n, unit, no_bold) {
+  var s = unit ? n + " " + unit + "." : n
+  if (n < 3) {
+    return ["quiet", s]
+  }
+  else if (n < 6) {
+    return ["older", s]
+  }
+  else if (n < 10) {
+    return ["old", s]
+  }
+  else if (n < 16) {
+    return ["med", s]
+  }
+  else if (no_bold || n < 21) {
+    return ["recent", s]
+  }
+  else {
+    return ["new", s]
+  }
+}
+export function get_age (t) {
+  var age = Math.abs(+Date.now() - new Date(t))/1000
+  var r = Math.floor
+  var m
+  if (age < 5) { return "now" }
+  if (age < 60) { return r(age) + "s" }
+  age /= 60
+  if (age < 60) { return r(age) + "m" }
+  m = r(age % 60)
+  age /= 60
+  if (m > 0 && age < 2) { return r(age) + "h" + m + "m" }
+  if (age < 24) { return r(age) + "h" }
+  age /= 24
+  if (age < 7) { return r(age) + "d" }
+  age /= 7
+  if (age < 12) { return r(age) + "w" }
+  age /= 4
+  if (age < 12) { return r(age) + "m" }
+  age /= 12
+  return r(age) + "y"
+}
+export function courtesy_s (n, s) { return n == 1 ? "" : (s || "s") }
diff --git a/app/client/util/index.js b/app/client/util/index.js
index c5eb8dd..a811dcf 100644
--- a/app/client/util/index.js
+++ b/app/client/util/index.js
@@ -1,7 +1,11 @@
 import * as sort from './sort'
+import * as format from './format'
+import * as maths from './math'
 
 export {
-  sort
+  sort,
+  ...maths,
+  ...format,
 }
 
 export const is_iphone = !!((navigator.userAgent.match(/iPhone/i)) || (navigator.userAgent.match(/iPod/i)))
@@ -16,13 +20,6 @@ htmlClassList.remove('loading')
 
 // window.debug = false
 
-export function clamp(n,a,b) { return n<a?a:n<b?n:b }
-export function norm(n,a,b) { return (n-a) / (b-a) }
-export function lerp(n,a,b) { return (b-a)*n+a }
-export function mix(n,a,b) { return a*(1-n)+b*n }
-export function randint(n) { return Math.floor(Math.random()*n) }
-export function randrange(a,b){ return Math.random() * (b-a) + a }
-
 document.body.style.backgroundImage = 'linear-gradient(' + (randint(40)+40) + 'deg, #fde, #ffe)'
 
 export const allProgress = (promises, progress_cb) => {
@@ -37,136 +34,3 @@ export const allProgress = (promises, progress_cb) => {
   })
   return Promise.all(promises)
 }
-
-export function timeInSeconds(n){
-  return (n / 10).toFixed(1) + ' s.'
-}
-export function gerund(s){
-  return s.replace(/e?$/, 'ing')
-}
-export function commatize (n, radix) {
-  radix = radix || 1024
-  var nums = [], i, counter = 0, r = Math.floor
-  if (n > radix) {
-    n /= radix
-    nums.unshift(r((n * 10) % 10))
-    nums.unshift(".")
-  }
-  do {
-    i = n % 10
-    n = r(n / 10)
-    if (n && ! (++counter % 3))
-      { i = ' ' + r(i) }
-    nums.unshift(r(i))
-  }
-  while (n)
-  return nums.join("")
-}
-export function carbon_date (date, no_bold) {
-  var span = (+new Date() - new Date(date)) / 1000, color
-  if (! no_bold && span < 86400) // modified today
-    { color = "new" }
-  else if (span < 604800) // modifed this week
-    { color = "recent" }
-  else if (span < 1209600) // modifed 2 weeks ago
-    { color = "med" }
-  else if (span < 3024000) // modifed 5 weeks ago
-    { color = "old" }
-  else if (span < 12315200) // modifed 6 months ago
-    { color = "older" }
-  else
-    { color = "quiet" }
-  return color
-}
-export function hush_views (n, bias, no_bold) {
-  var txt = commatize(n, 1000)
-  bias = bias || 1
-  n = n || 0
-  if (n < 30) { return["quiet", n + " v."] }
-  if (n < 200) { return ["quiet", txt + " v."] }
-  else if (n < 500) { return ["quiet", txt + " v."] }
-  else if (n < 1000) { return ["old", txt + " v."] }
-  else if (n < 5000) { return ["med", txt + " kv."] }
-  else if (no_bold || n < 10000) { return ["recent", txt + " kv."] }
-  else { return ["new", txt + " kv."] }
-}
-export function hush_threads (n, bias, no_bold) {
-  var txt = commatize(n, 1000)
-  bias = bias || 1
-  n = n || 0
-  if (n < 10) { return["quiet", n + " t."] }
-  else if (n < 25) { return ["old", txt + " t."] }
-  else if (n < 50) { return ["med", txt + " t."] }
-  else if (no_bold || n < 100) { return ["recent", txt + " t."] }
-  else { return ["new", txt + " t."] }
-}
-export function hush_size (n, bias, no_bold) {
-  var txt = commatize(Math.round(n / 1024))
-  bias = 1 || bias
-  n = n || 0
-  if (! n) { return ['', ''] }
-  if (n < 1000) {
-    return ["quiet", n + " b."]
-  }
-  if (n < 1000000) {
-    return ["quiet", txt + " kb."]
-  }
-  else if (n < (20000000/bias)) {
-    return ["quiet", txt + " mb."]
-  }
-  else if (n < (50000000/bias)) {
-    return ["old", txt + " mb."]
-  }
-  else if (n < (80000000/bias)) {
-    return ["med", txt + " mb."]
-  }
-  else if (no_bold || n < (170000000/bias)) {
-    return ["recent", txt + " mb."]
-  }
-  else {
-    return ["new", txt + " mb."]
-  }
-}
-export function hush_null (n, unit, no_bold) {
-  var s = unit ? n + " " + unit + "." : n
-  if (n < 3) {
-    return ["quiet", s]
-  }
-  else if (n < 6) {
-    return ["older", s]
-  }
-  else if (n < 10) {
-    return ["old", s]
-  }
-  else if (n < 16) {
-    return ["med", s]
-  }
-  else if (no_bold || n < 21) {
-    return ["recent", s]
-  }
-  else {
-    return ["new", s]
-  }
-}
-export function get_age (t) {
-  var age = Math.abs(+Date.now() - new Date(t))/1000
-  var r = Math.floor
-  var m
-  if (age < 5) { return "now" }
-  if (age < 60) { return r(age) + "s" }
-  age /= 60
-  if (age < 60) { return r(age) + "m" }
-  m = r(age % 60)
-  age /= 60
-  if (m > 0 && age < 2) { return r(age) + "h" + m + "m" }
-  if (age < 24) { return r(age) + "h" }
-  age /= 24
-  if (age < 7) { return r(age) + "d" }
-  age /= 7
-  if (age < 12) { return r(age) + "w" }
-  age /= 4
-  if (age < 12) { return r(age) + "m" }
-  age /= 12
-  return r(age) + "y"
-}
-export function courtesy_s (n, s) { return n == 1 ? "" : (s || "s") }
diff --git a/app/client/util/math.js b/app/client/util/math.js
new file mode 100644
index 0000000..253bacd
--- /dev/null
+++ b/app/client/util/math.js
@@ -0,0 +1,52 @@
+export function mod(n,m){ return n-(m * Math.floor(n/m)) }
+export function clamp(n,a,b) { return n<a?a:n<b?n:b }
+export function norm(n,a,b) { return (n-a) / (b-a) }
+export function lerp(n,a,b) { return (b-a)*n+a }
+export function mix(n,a,b) { return a*(1-n)+b*n }
+export function randint(n) { return Math.floor(Math.random()*n) }
+export function randrange(a,b){ return Math.random() * (b-a) + a }
+export function randsign(){ return Math.random() >= 0.5 ? -1 : 1 }
+export function choice (a){ return a[ Math.floor(Math.random() * a.length) ] }
+export function lerp(n,a,b){ return (b-a)*n+a }
+export function angle(x0,y0,x1,y1){ return Math.atan2(y1-y0,x1-x0) }
+export function dist(x0,y0,x1,y1){ return Math.sqrt(Math.pow(x1-x0,2)+Math.pow(y1-y0,2)) }
+export function xor(a,b){ a=!!a; b=!!b; return (a||b) && !(a&&b) }
+export function quantize(a,b){ return Math.floor(a/b)*b }
+export function shuffle(a){
+  for (var i = a.length; i > 0; i--){
+    var r = randint(i)
+    var swap = a[i-1]
+    a[i-1] = a[r]
+    a[r] = swap
+  }
+  return a
+}
+// returns a gaussian random function with the given mean and stdev.
+export function gaussian(mean, stdev) {
+  let y2;
+  let use_last = false;
+  return () => {
+    let y1;
+    if (use_last) {
+      y1 = y2;
+      use_last = false;
+    }
+    else {
+      let x1, x2, w;
+      do {
+        x1 = 2.0 * Math.random() - 1.0;
+        x2 = 2.0 * Math.random() - 1.0;
+        w = x1 * x1 + x2 * x2;
+      } while( w >= 1.0);
+      w = Math.sqrt((-2.0 * Math.log(w))/w);
+      y1 = x1 * w;
+      y2 = x2 * w;
+      use_last = true;
+    }
+
+    let retval = mean + stdev * y1;
+    if (retval > 0)
+      return retval;
+    return -retval;
+  }
+}
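For reference, the frame geometry implied by the constants above works out to exactly the 256×256 canvas that `wav2pix.js` resizes to. A quick back-of-the-envelope check, not part of the commit:

```js
// Each exported frame is FRAME_LENGTH = 126 * 255 = 32130 samples (wav2pix.js).
// toSpectrum() pads to a multiple of fft_size and starts one frame early (i = -fft_size):
const fft_size = 512, fft_overlap = 128                      // constants in spectrum.js
const pcm_q_length = Math.ceil(32130 / fft_size) * fft_size  // 32256
const columns = (pcm_q_length + fft_size) / fft_overlap      // 256 STFT frames -> image width
const rows = fft_size / 2                                    // 256 frequency bins -> image height
```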