|           |                                                            |                           |
|-----------|------------------------------------------------------------|---------------------------|
| author    | Jules Laplace <julescarbon@gmail.com>                      | 2018-06-06 00:59:39 +0200 |
| committer | Jules Laplace <julescarbon@gmail.com>                      | 2018-06-06 00:59:39 +0200 |
| commit    | d3fcd1212f7214b12b04a83d03dfb129c5fbb0a4 (patch)           |                           |
| tree      | b9cede8c8b245d5dc5d3ed23d8879e603af82809 /app/client/audio |                           |
| parent    | 8c8e2e08d2ae89ba18ca05bab446e4642798dce2 (diff)            |                           |
pix2wav paths
Diffstat (limited to 'app/client/audio')
| mode       | file                                                                                            | lines changed |
|------------|-------------------------------------------------------------------------------------------------|---------------|
| -rw-r--r-- | app/client/audio/index.js                                                                       | 2   |
| -rw-r--r-- | app/client/audio/lib/_draw.js                                                                   | 139 |
| -rw-r--r-- | app/client/audio/lib/draw.js (renamed from app/client/audio/draw.js)                            | 138 |
| -rw-r--r-- | app/client/audio/lib/index.js                                                                   | 47  |
| -rw-r--r-- | app/client/audio/lib/output.js (renamed from app/client/audio/output.js)                        | 0   |
| -rw-r--r-- | app/client/audio/lib/spectrum.js (renamed from app/client/audio/spectrum.js)                    | 0   |
| -rw-r--r-- | app/client/audio/lib/startAudioContext.js (renamed from app/client/audio/startAudioContext.js)  | 0   |
| -rw-r--r-- | app/client/audio/pix2wav.js                                                                     | 35  |
| -rw-r--r-- | app/client/audio/ui.js                                                                          | 12  |
| -rw-r--r-- | app/client/audio/wav2pix.js                                                                     | 149 |
10 files changed, 271 insertions, 251 deletions
```diff
diff --git a/app/client/audio/index.js b/app/client/audio/index.js
deleted file mode 100644
index 8b3da74..0000000
--- a/app/client/audio/index.js
+++ /dev/null
@@ -1,2 +0,0 @@
-import Tone from 'tone'
-import StartAudioContext from './startAudioContext'
diff --git a/app/client/audio/lib/_draw.js b/app/client/audio/lib/_draw.js
new file mode 100644
index 0000000..974fa62
--- /dev/null
+++ b/app/client/audio/lib/_draw.js
@@ -0,0 +1,139 @@
+import {
+  browser, requestAudioContext,
+  randint, randrange, clamp, mod,
+} from './lib/util'
+
+import './lib/vendor/hidpi-canvas'
+
+import mouse from './lib/mouse'
+import color from './lib/color'
+
+let w, h
+let rx, ry
+
+const pixels_per_second = 512 // 1024
+
+const canvas = document.createElement('canvas')
+// document.body.appendChild(canvas)
+// document.body.addEventListener('resize', resize)
+resize()
+recenter()
+requestAnimationFrame(animate)
+
+// must request context after resizing
+const ctx = canvas.getContext('2d')
+
+const scratch = document.createElement('canvas')
+const scratchCtx = scratch.getContext('2d-lodpi')
+
+function resize(ww, hh){
+  w = canvas.width = ww || window.innerWidth
+  h = canvas.height = hh || window.innerHeight
+  canvas.style.width = w + 'px'
+  canvas.style.height = h + 'px'
+}
+function recenter(){
+  rx = randint(w), ry = randint(h)
+}
+let frame = null
+function onFrame(fn){
+  frame = fn
+}
+function animate(t){
+  requestAnimationFrame(animate)
+  if (frame) {
+    frame(t)
+    frame = null
+  }
+  // ctx.save()
+  // ctx.globalAlpha = 0.0001
+  // ctx.translate(w/2, h/2)
+  // ctx.rotate(0.1)
+  // ctx.translate(-rx, -ry)
+  // ctx.drawImage(canvas, 0, 0)
+  // ctx.restore()
+}
+function clear(n, x, y, ww, hh){
+  ctx.fillStyle = 'rgba(255,255,255,' + (n || 0.9) + ')'
+  ctx.fillRect(x || 0, y || 0, ww || w, hh || h)
+  recenter()
+}
+function triangle(px,py,r){
+  setTimeout( () => tri(px,py,r), Math.random()*10)
+  // setTimeout( () => tri(px,py,r), Math.random()*200)
+  // setTimeout( () => tri(px,py,r), Math.random()*300)
+}
+function tri(px, py, r) {
+  ctx.save()
+  ctx.globalCompositeOperation = 'multiply'
+  ctx.fillStyle = color.color((px+py)/(w+h), 0, 1, 0.2)
+  function p(){
+    let theta = randrange(0, Math.PI*2)
+    let x = px + Math.cos(theta) * r
+    let y = py + Math.sin(theta) * r
+    return { x, y }
+  }
+  ctx.beginPath()
+  const p0 = p(), p1 = p(), p2 = p()
+  ctx.moveTo(p0.x, p0.y)
+  ctx.lineTo(p1.x, p1.y)
+  ctx.lineTo(p2.x, p2.y)
+  ctx.lineTo(p0.x, p0.y)
+  ctx.fill()
+  ctx.restore()
+}
+function line(y){
+  ctx.beginPath()
+  ctx.moveTo(0, y)
+  ctx.lineTo(w, y)
+  ctx.strokeStyle = "#888"
+  ctx.strokeWidth = 1
+  ctx.stroke()
+}
+function dot(x, y, r){
+  ctx.fillStyle = "#f00"
+  ctx.beginPath()
+  ctx.moveTo(x, y)
+  ctx.arc(x, y, r, 0, 2*Math.PI)
+  ctx.fill()
+}
+function waveform(pcm, sr, pos, zoom){
+  sr = sr || 44100
+  pos = pos || 0
+
+  var width = w
+  var height = Math.floor(h/4)
+  var half_height = Math.floor(height/2)
+  var x0 = 0
+  var y0 = 20
+  var ymid = y0 + half_height
+  var max_width_in_seconds = width / pixels_per_second
+  var max_width_in_samples = max_width_in_seconds * sr
+  var pcm_length = pcm.length
+  var len = Math.min(pcm_length, max_width_in_samples)
+  var pcm_step = sr / pixels_per_second
+  var i
+  ctx.save()
+
+  clear(1, x0, y0, width, height)
+
+  line(ymid)
+  ctx.beginPath()
+  for (i = 0; i < width; i += 0.5) {
+    var si = Math.floor(pcm_step * i + pos)
+    if (si > pcm_length) break
+    var val = pcm[si] // -1, 1
+    // ctx.moveTo(x0 + i, ymid)
+    ctx.lineTo(x0 + i, ymid + val * half_height)
+  }
+  ctx.strokeStyle = "rgba(250,20,0,0.9)"
+  ctx.strokeWidth = 1
+  ctx.stroke()
+  ctx.restore()
+}
+
+export default {
+  canvas, ctx, onFrame, resize,
+  triangle, clear, line, dot,
+  waveform, spectrum, raw_spectrum,
+}
\ No newline at end of file
```
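The `waveform()` helper above maps sample indices to pixel columns through `pixels_per_second`. A quick worked example of that arithmetic, using the defaults from `_draw.js` (the 1024 px canvas width is only illustrative):

```js
// Sample-to-pixel mapping used by waveform(), with the file's defaults.
const sr = 44100                   // default sample rate in waveform()
const pixels_per_second = 512      // module constant in _draw.js

const pcm_step = sr / pixels_per_second            // ≈ 86.13 samples advanced per pixel column
const width = 1024                                 // hypothetical canvas width in px
const visible_seconds = width / pixels_per_second  // 2 s of audio fit across the canvas
const visible_samples = visible_seconds * sr       // 88200 samples

console.log({ pcm_step, visible_seconds, visible_samples })
```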
```diff
diff --git a/app/client/audio/draw.js b/app/client/audio/lib/draw.js
index 8caf8d8..f5ba3ac 100644
--- a/app/client/audio/draw.js
+++ b/app/client/audio/lib/draw.js
@@ -1,137 +1,6 @@
-import {
-  browser, requestAudioContext,
-  randint, randrange, clamp, mod,
-} from './lib/util'
-
-import './lib/vendor/hidpi-canvas'
-
-import mouse from './lib/mouse'
-import color from './lib/color'
-
-let w, h
-let rx, ry
-
-const pixels_per_second = 512 // 1024
-
-const canvas = document.createElement('canvas')
-// document.body.appendChild(canvas)
-// document.body.addEventListener('resize', resize)
-resize()
-recenter()
-requestAnimationFrame(animate)
-
-// must request context after resizing
-const ctx = canvas.getContext('2d')
-
 const scratch = document.createElement('canvas')
 const scratchCtx = scratch.getContext('2d-lodpi')
 
-function resize(ww, hh){
-  w = canvas.width = ww || window.innerWidth
-  h = canvas.height = hh || window.innerHeight
-  canvas.style.width = w + 'px'
-  canvas.style.height = h + 'px'
-}
-function recenter(){
-  rx = randint(w), ry = randint(h)
-}
-let frame = null
-function onFrame(fn){
-  frame = fn
-}
-function animate(t){
-  requestAnimationFrame(animate)
-  if (frame) {
-    frame(t)
-    frame = null
-  }
-  // ctx.save()
-  // ctx.globalAlpha = 0.0001
-  // ctx.translate(w/2, h/2)
-  // ctx.rotate(0.1)
-  // ctx.translate(-rx, -ry)
-  // ctx.drawImage(canvas, 0, 0)
-  // ctx.restore()
-}
-function clear(n, x, y, ww, hh){
-  ctx.fillStyle = 'rgba(255,255,255,' + (n || 0.9) + ')'
-  ctx.fillRect(x || 0, y || 0, ww || w, hh || h)
-  recenter()
-}
-function triangle(px,py,r){
-  setTimeout( () => tri(px,py,r), Math.random()*10)
-  // setTimeout( () => tri(px,py,r), Math.random()*200)
-  // setTimeout( () => tri(px,py,r), Math.random()*300)
-}
-function tri(px, py, r) {
-  ctx.save()
-  ctx.globalCompositeOperation = 'multiply'
-  ctx.fillStyle = color.color((px+py)/(w+h), 0, 1, 0.2)
-  function p(){
-    let theta = randrange(0, Math.PI*2)
-    let x = px + Math.cos(theta) * r
-    let y = py + Math.sin(theta) * r
-    return { x, y }
-  }
-  ctx.beginPath()
-  const p0 = p(), p1 = p(), p2 = p()
-  ctx.moveTo(p0.x, p0.y)
-  ctx.lineTo(p1.x, p1.y)
-  ctx.lineTo(p2.x, p2.y)
-  ctx.lineTo(p0.x, p0.y)
-  ctx.fill()
-  ctx.restore()
-}
-function line(y){
-  ctx.beginPath()
-  ctx.moveTo(0, y)
-  ctx.lineTo(w, y)
-  ctx.strokeStyle = "#888"
-  ctx.strokeWidth = 1
-  ctx.stroke()
-}
-function dot(x, y, r){
-  ctx.fillStyle = "#f00"
-  ctx.beginPath()
-  ctx.moveTo(x, y)
-  ctx.arc(x, y, r, 0, 2*Math.PI)
-  ctx.fill()
-}
-function waveform(pcm, sr, pos, zoom){
-  sr = sr || 44100
-  pos = pos || 0
-
-  var width = w
-  var height = Math.floor(h/4)
-  var half_height = Math.floor(height/2)
-  var x0 = 0
-  var y0 = 20
-  var ymid = y0 + half_height
-  var max_width_in_seconds = width / pixels_per_second
-  var max_width_in_samples = max_width_in_seconds * sr
-  var pcm_length = pcm.length
-  var len = Math.min(pcm_length, max_width_in_samples)
-  var pcm_step = sr / pixels_per_second
-  var i
-  ctx.save()
-
-  clear(1, x0, y0, width, height)
-
-  line(ymid)
-  ctx.beginPath()
-  for (i = 0; i < width; i += 0.5) {
-    var si = Math.floor(pcm_step * i + pos)
-    if (si > pcm_length) break
-    var val = pcm[si] // -1, 1
-    // ctx.moveTo(x0 + i, ymid)
-    ctx.lineTo(x0 + i, ymid + val * half_height)
-  }
-  ctx.strokeStyle = "rgba(250,20,0,0.9)"
-  ctx.strokeWidth = 1
-  ctx.stroke()
-  ctx.restore()
-}
-
 function spectrum(spec, x0, y0, ww, hh){
   const data = spec.data
   const fft_size = spec.fft_size
@@ -186,6 +55,7 @@ function spectrum(spec, x0, y0, ww, hh){
   ctx.drawImage(scratch, x0, y0, width, height)
   ctx.restore()
 }
+
 function raw_spectrum(spec, x0, y0, ww, hh, def_min_r, def_min_i){
   const data = spec.data
   const fft_size = spec.fft_size
@@ -262,9 +132,3 @@ function raw_spectrum(spec, x0, y0, ww, hh, def_min_r, def_min_i){
 
   return { canvas: _scratch, imageData }
 }
-
-export default {
-  canvas, ctx, onFrame, resize,
-  triangle, clear, line, dot,
-  waveform, spectrum, raw_spectrum,
-}
\ No newline at end of file
```
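The `spectrum()`/`raw_spectrum()` helpers that remain in `lib/draw.js` render into the offscreen `scratch` canvas and then blit the finished frame onto the main canvas, as the kept `ctx.drawImage(scratch, …)` context line shows. A minimal, self-contained sketch of that offscreen-then-blit pattern (the sizes and the plain `'2d'` context are illustrative; the original uses the hidpi-canvas `'2d-lodpi'` context):

```js
// Build a frame on an offscreen scratch canvas, then composite it in one call.
const main = document.createElement('canvas')
main.width = main.height = 256
const ctx = main.getContext('2d')

const scratch = document.createElement('canvas')
scratch.width = scratch.height = 256
const scratchCtx = scratch.getContext('2d')

// write pixel data offscreen first (spectrum()/raw_spectrum() do this per frame)
scratchCtx.fillStyle = '#000'
scratchCtx.fillRect(0, 0, 256, 256)

// ...then blit the finished frame onto the visible canvas
ctx.drawImage(scratch, 0, 0, 256, 256)
```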
```diff
diff --git a/app/client/audio/lib/index.js b/app/client/audio/lib/index.js
new file mode 100644
index 0000000..ba96112
--- /dev/null
+++ b/app/client/audio/lib/index.js
@@ -0,0 +1,47 @@
+import Tone from 'tone'
+import StartAudioContext from './startAudioContext'
+
+import { is_mobile } from '../util'
+
+export function requestAudioContext (fn) {
+  if (is_mobile) {
+    const container = document.createElement('div')
+    const button = document.createElement('div')
+    button.innerHTML = 'Tap to start - please unmute your phone'
+    Object.assign(container.style, {
+      display: 'block',
+      position: 'absolute',
+      width: '100%',
+      height: '100%',
+      zIndex: '10000',
+      top: '0px',
+      left: '0px',
+      backgroundColor: 'rgba(0, 0, 0, 0.8)',
+    })
+    Object.assign(button.style, {
+      display: 'block',
+      position: 'absolute',
+      left: '50%',
+      top: '50%',
+      padding: '20px',
+      backgroundColor: '#7F33ED',
+      color: 'white',
+      fontFamily: 'monospace',
+      borderRadius: '3px',
+      transform: 'translate3D(-50%,-50%,0)',
+      textAlign: 'center',
+      lineHeight: '1.5',
+      width: '150px',
+    })
+    container.appendChild(button)
+    document.body.appendChild(container)
+    StartAudioContext.setContext(Tone.context)
+    StartAudioContext.on(button)
+    StartAudioContext.onStarted(_ => {
+      container.remove()
+      fn()
+    })
+  } else {
+    fn()
+  }
+}
\ No newline at end of file
```
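The new `lib/index.js` gates audio startup behind a tap overlay on mobile and calls straight through on desktop. A minimal usage sketch, mirroring how `wav2pix.js` below calls it:

```js
import { requestAudioContext } from './lib'

requestAudioContext(() => {
  // The audio context is running here (after the tap overlay on mobile,
  // immediately on desktop), so it is safe to create Tone players,
  // decode buffers, and start playback.
})
```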
```diff
diff --git a/app/client/audio/output.js b/app/client/audio/lib/output.js
index 53901b3..53901b3 100644
--- a/app/client/audio/output.js
+++ b/app/client/audio/lib/output.js
diff --git a/app/client/audio/spectrum.js b/app/client/audio/lib/spectrum.js
index f4a5444..f4a5444 100644
--- a/app/client/audio/spectrum.js
+++ b/app/client/audio/lib/spectrum.js
diff --git a/app/client/audio/startAudioContext.js b/app/client/audio/lib/startAudioContext.js
index 0e257be..0e257be 100644
--- a/app/client/audio/startAudioContext.js
+++ b/app/client/audio/lib/startAudioContext.js
diff --git a/app/client/audio/pix2wav.js b/app/client/audio/pix2wav.js
new file mode 100644
index 0000000..ccd36be
--- /dev/null
+++ b/app/client/audio/pix2wav.js
@@ -0,0 +1,35 @@
+function render(pcm, count, zip){
+  const fft = spectrum.toSpectrum(pcm, sr)
+  // console.log('render', fft)
+  // const pcm_rev = pcm.slice().reverse()
+  // const spec_rev = spectrum.toSpectrum(pcm_rev, spec.sr)
+  draw.clear()
+  const { canvas, imageData } = draw.raw_spectrum(fft, 0, 256, 0, 256, _r, _i)
+  const dataURL = canvas.toDataURL("image/png")
+  if (zip) {
+    const fn = sprintf('frame_%05d.png', count)
+    zip.file(fn, dataURL.split(',')[1], {base64: true})
+  }
+  return { fft, canvas, imageData }
+}
+function play(i) {
+  // console.log('play', i)
+  last_i = i
+  let player = players[clamp(i, 0, players.length)]
+  // const { canvas, imageData } = draw.raw_spectrum(fft, 0, 256, 0, 256, 1, 1)
+  // console.log(_r, _i)
+  // const { canvas, imageData } = draw.raw_spectrum(player.fft, 0, 256, 0, 256, _r, _i)
+  const new_fft = spectrum.fromImageData(player.imageData, 44100, _r, _i)
+  // gallery.innerHTML = ''
+
+  // console.log(player.fft.data, new_fft.data)
+  const buf = spectrum.fromSpectrum(new_fft)
+  const _p = new Tone.Player(buf)
+  _p.connect(output)
+  _p.start(Tone.now())
+  redraw(new_fft)
+}
+function redraw(new_fft){
+  const { canvas, imageData } = draw.raw_spectrum(new_fft, 0, 256, 0, 256, _r, _i)
+}
+
diff --git a/app/client/audio/ui.js b/app/client/audio/ui.js
new file mode 100644
index 0000000..76ffb09
--- /dev/null
+++ b/app/client/audio/ui.js
@@ -0,0 +1,12 @@
+/*
+
+mouse.register({
+  move: (x, y) => {
+  }
+})
+keys.listen((z) => {
+  // console.log(z)
+  play(mod(z, players.length))
+})
+
+*/
diff --git a/app/client/audio/wav2pix.js b/app/client/audio/wav2pix.js
index 3e86c40..089816d 100644
--- a/app/client/audio/wav2pix.js
+++ b/app/client/audio/wav2pix.js
@@ -1,153 +1,78 @@
 import Tone from 'tone'
 import JSZip from 'jszip'
-import { sprintf } from 'sprintf-js'
 import FileSaver from 'file-saver'
-import draw from './draw'
-import keys from './lib/keys'
-import mouse from './lib/mouse'
+import draw from './lib/draw'
 import output from './lib/output'
 import spectrum from './lib/spectrum'
 import {
-  requestAudioContext,
   lerp, clamp, mod,
-} from './lib/util'
+} from '../util'
+
+import { requestAudioContext } from './lib'
 
-let selfDrag = false
 let buffer
-let players = []
-let gallery
 let sr = 44100
 let last_i = 0
 let _r = 8, _i = 8
 
-function init(){
-  requestAudioContext(ready)
-  document.body.addEventListener('dragover', dragover)
-  document.body.addEventListener('dragstart', dragstart)
-  document.body.addEventListener('drop', drop)
-  document.querySelector("#upload").addEventListener('change', handleFileSelect)
-  // draw.onFrame(() => {})
-  draw.resize(256, 256)
-  gallery = document.querySelector('#gallery')
-  mouse.register({
-    move: (x, y) => {
-    }
-  })
-  keys.listen((z) => {
-    // console.log(z)
-    play(mod(z, players.length))
-  })
-}
-function ready(){
-}
-function dragover (e) {
-  e.preventDefault()
-}
-function dragstart (e) {
-  selfDrag = true
-}
-function drop (e) {
-  e.stopPropagation()
-  e.preventDefault()
+let files, file_index = 0;
 
-  if (e.dataTransfer && ! selfDrag) {
-    if (e.dataTransfer.files.length) {
-      handleFileSelect(e)
-    }
-  }
-  else {
-    handleFileSelect(e)
-  }
-  selfDrag = false
+const FRAME_LENGTH = 126 * 255
+const FRAME_OFFSET = FRAME_LENGTH / 4
+
+function init() {
+  requestAudioContext(ready)
+  draw.resize(256, 256)
 }
-let files, file_index = 0;
-function handleFileSelect(e){
+function handleFileSelect(e) {
   files = e.dataTransfer ? e.dataTransfer.files : e.target.files
   loadNext()
 }
 
-function loadNext(){
+function loadNext() {
  var file = files[file_index++]
  if (! file) return
  load(file)
 }
 
-function load(file){
-  players = []
-  buffer = new Tone.Buffer(URL.createObjectURL(file), loadBuffer, (err) => console.error('err', err))
+function load(file) {
+  buffer = new Tone.Buffer(
+    URL.createObjectURL(file),
+    loadBuffer,
+    (err) => console.error('err', err)
+  )
 }
 
-function loadBuffer(){
+function loadBuffer() {
+  // dispatch
  console.log('loaded buffer', buffer)
  const pcm = buffer._buffer.getChannelData(0)
-  sr = buffer._buffer.sampleRate
+  const sr = buffer._buffer.sampleRate
  if (! pcm) return
 
-  const FRAME_LENGTH = 126 * 255
-  const FRAME_OFFSET = FRAME_LENGTH / 4
+  const zip = new JSZip()
+  const zip_folder = zip.folder("wav2pix_" + name);
 
-  var zip = new JSZip()
-
-  var zip_folder = zip.folder("images");
-
-  for (var offset = 0, count = 0, _len = pcm.length - FRAME_LENGTH; offset < _len; offset += FRAME_OFFSET, count += 1) {
-    if ((count % 100) === 0) console.log(count)
-    // console.log('generating', count, offset)
-    // let player = render(pcm.slice(offset, offset+FRAME_LENGTH), count, zip_folder)
+  const offset = 0
+  for (offset = 0, count = 0, _len = pcm.length - FRAME_LENGTH;
+    offset < _len;
+    offset += FRAME_OFFSET, count += 1
+  ) {
+    if ((count % 100) === 0) {
+      // dispatch event instead..
+      console.log(count)
+    }
    render(pcm.slice(offset, offset+FRAME_LENGTH), count, zip_folder)
-    // register(player, count)
-    // if (count > 20) break
  }
+  // dispatch event
  console.log('done exporting')
-  zip.generateAsync({type:"blob"}).then(function(content) {
+  zip.generateAsync({ type: "blob" }).then(content => {
+    // dispatch
    console.log('saving zip')
-    FileSaver.saveAs(content, "img2pix.zip")
+    // FileSaver.saveAs(content, "wav2pix_" + name + ".zip")
    setTimeout(loadNext, 1000)
  })
  // play(0)
 }
-function render(pcm, count, zip){
-  const fft = spectrum.toSpectrum(pcm, sr)
-  // console.log('render', fft)
-  // const pcm_rev = pcm.slice().reverse()
-  // const spec_rev = spectrum.toSpectrum(pcm_rev, spec.sr)
-  draw.clear()
-  const { canvas, imageData } = draw.raw_spectrum(fft, 0, 256, 0, 256, _r, _i)
-  const dataURL = canvas.toDataURL("image/png")
-  if (zip) {
-    const fn = sprintf('frame_%05d.png', count)
-    zip.file(fn, dataURL.split(',')[1], {base64: true})
-  }
-  return { fft, canvas, imageData }
-}
-function play(i) {
-  // console.log('play', i)
-  last_i = i
-  let player = players[clamp(i, 0, players.length)]
-  // const { canvas, imageData } = draw.raw_spectrum(fft, 0, 256, 0, 256, 1, 1)
-  // console.log(_r, _i)
-  // const { canvas, imageData } = draw.raw_spectrum(player.fft, 0, 256, 0, 256, _r, _i)
-  const new_fft = spectrum.fromImageData(player.imageData, 44100, _r, _i)
-  // gallery.innerHTML = ''
-  // console.log(player.fft.data, new_fft.data)
-  const buf = spectrum.fromSpectrum(new_fft)
-  const _p = new Tone.Player(buf)
-  _p.connect(output)
-  _p.start(Tone.now())
-  redraw(new_fft)
-}
-function redraw(new_fft){
-  const { canvas, imageData } = draw.raw_spectrum(new_fft, 0, 256, 0, 256, _r, _i)
-}
-function register(player, i){
-  // console.log('register', player)
-  players.push(player)
-  player.canvas.addEventListener('click', () => {
-    play(i)
-  })
-  if (i < 20) {
-    gallery.appendChild(player.canvas)
-  }
-}
 
 init()
```
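The export loop in `loadBuffer()` slices the PCM into overlapping frames of `FRAME_LENGTH = 126 * 255` samples, advancing by a quarter frame per step, and renders each frame onto the 256×256 canvas as one PNG in the zip. A quick worked example of that framing arithmetic (the 60 s input duration is only illustrative):

```js
const sr = 44100
const FRAME_LENGTH = 126 * 255          // 32130 samples ≈ 0.73 s per frame at 44.1 kHz
const FRAME_OFFSET = FRAME_LENGTH / 4   // hop of a quarter frame, i.e. 75% overlap

const seconds = 60                      // hypothetical input duration
const pcm_length = seconds * sr         // 2,646,000 samples
// iterations of: for (offset = 0; offset < pcm_length - FRAME_LENGTH; offset += FRAME_OFFSET)
const frames = Math.ceil((pcm_length - FRAME_LENGTH) / FRAME_OFFSET)

console.log({ frame_seconds: FRAME_LENGTH / sr, frames })  // ≈ 0.7286 s per frame, 326 frames
```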
