Diffstat (limited to 'app/client/audio/lib')
-rw-r--r--  app/client/audio/lib/_draw.js              139
-rw-r--r--  app/client/audio/lib/draw.js               134
-rw-r--r--  app/client/audio/lib/index.js               47
-rw-r--r--  app/client/audio/lib/output.js               8
-rw-r--r--  app/client/audio/lib/spectrum.js           278
-rw-r--r--  app/client/audio/lib/startAudioContext.js  179
6 files changed, 785 insertions(+), 0 deletions(-)
diff --git a/app/client/audio/lib/_draw.js b/app/client/audio/lib/_draw.js
new file mode 100644
index 0000000..974fa62
--- /dev/null
+++ b/app/client/audio/lib/_draw.js
@@ -0,0 +1,139 @@
+// NOTE (assumption): the shared helpers live one directory up, next to the
+// '../util' that index.js imports; the original './lib/…' paths would not
+// resolve from inside app/client/audio/lib.
+import {
+ browser, requestAudioContext,
+ randint, randrange, clamp, mod,
+} from '../util'
+
+import '../vendor/hidpi-canvas'
+
+import mouse from '../mouse'
+import color from '../color'
+
+let w, h
+let rx, ry
+
+const pixels_per_second = 512 // 1024
+
+const canvas = document.createElement('canvas')
+// document.body.appendChild(canvas)
+// document.body.addEventListener('resize', resize)
+resize()
+recenter()
+requestAnimationFrame(animate)
+
+// must request context after resizing
+const ctx = canvas.getContext('2d')
+
+const scratch = document.createElement('canvas')
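+// '2d-lodpi' is assumed to be a non-standard context id provided by the
+// vendored hidpi-canvas polyfill for an unscaled (low-DPI) 2d context.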
+const scratchCtx = scratch.getContext('2d-lodpi')
+
+function resize(ww, hh){
+ w = canvas.width = ww || window.innerWidth
+ h = canvas.height = hh || window.innerHeight
+ canvas.style.width = w + 'px'
+ canvas.style.height = h + 'px'
+}
+function recenter(){
+ rx = randint(w), ry = randint(h)
+}
+let frame = null
+function onFrame(fn){
+ frame = fn
+}
+function animate(t){
+ requestAnimationFrame(animate)
+ if (frame) {
+ frame(t)
+ frame = null
+ }
+ // ctx.save()
+ // ctx.globalAlpha = 0.0001
+ // ctx.translate(w/2, h/2)
+ // ctx.rotate(0.1)
+ // ctx.translate(-rx, -ry)
+ // ctx.drawImage(canvas, 0, 0)
+ // ctx.restore()
+}
+function clear(n, x, y, ww, hh){
+ ctx.fillStyle = 'rgba(255,255,255,' + (n || 0.9) + ')'
+ ctx.fillRect(x || 0, y || 0, ww || w, hh || h)
+ recenter()
+}
+function triangle(px,py,r){
+ setTimeout( () => tri(px,py,r), Math.random()*10)
+ // setTimeout( () => tri(px,py,r), Math.random()*200)
+ // setTimeout( () => tri(px,py,r), Math.random()*300)
+}
+function tri(px, py, r) {
+ ctx.save()
+ ctx.globalCompositeOperation = 'multiply'
+ ctx.fillStyle = color.color((px+py)/(w+h), 0, 1, 0.2)
+ function p(){
+ let theta = randrange(0, Math.PI*2)
+ let x = px + Math.cos(theta) * r
+ let y = py + Math.sin(theta) * r
+ return { x, y }
+ }
+ ctx.beginPath()
+ const p0 = p(), p1 = p(), p2 = p()
+ ctx.moveTo(p0.x, p0.y)
+ ctx.lineTo(p1.x, p1.y)
+ ctx.lineTo(p2.x, p2.y)
+ ctx.lineTo(p0.x, p0.y)
+ ctx.fill()
+ ctx.restore()
+}
+function line(y){
+ ctx.beginPath()
+ ctx.moveTo(0, y)
+ ctx.lineTo(w, y)
+ ctx.strokeStyle = "#888"
+ ctx.lineWidth = 1
+ ctx.stroke()
+}
+function dot(x, y, r){
+ ctx.fillStyle = "#f00"
+ ctx.beginPath()
+ ctx.moveTo(x, y)
+ ctx.arc(x, y, r, 0, 2*Math.PI)
+ ctx.fill()
+}
+function waveform(pcm, sr, pos, zoom){
+ sr = sr || 44100
+ pos = pos || 0
+
+ var width = w
+ var height = Math.floor(h/4)
+ var half_height = Math.floor(height/2)
+ var x0 = 0
+ var y0 = 20
+ var ymid = y0 + half_height
+ var max_width_in_seconds = width / pixels_per_second
+ var max_width_in_samples = max_width_in_seconds * sr
+ var pcm_length = pcm.length
+ var len = Math.min(pcm_length, max_width_in_samples)
+ var pcm_step = sr / pixels_per_second
+ var i
+ ctx.save()
+
+ clear(1, x0, y0, width, height)
+
+ line(ymid)
+ ctx.beginPath()
+ for (i = 0; i < width; i += 0.5) {
+ var si = Math.floor(pcm_step * i + pos)
+ if (si >= pcm_length) break // pcm[pcm_length] is out of range
+ var val = pcm[si] // -1, 1
+ // ctx.moveTo(x0 + i, ymid)
+ ctx.lineTo(x0 + i, ymid + val * half_height)
+ }
+ ctx.strokeStyle = "rgba(250,20,0,0.9)"
+ ctx.lineWidth = 1
+ ctx.stroke()
+ ctx.restore()
+}
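+
+// Usage sketch (assumption: `buffer` is an AudioBuffer decoded elsewhere):
+//   waveform(buffer.getChannelData(0), buffer.sampleRate, 0)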
+
+export default {
+ canvas, ctx, onFrame, resize,
+ triangle, clear, line, dot,
+ waveform, // spectrum and raw_spectrum live in ./draw.js
+}
\ No newline at end of file
diff --git a/app/client/audio/lib/draw.js b/app/client/audio/lib/draw.js
new file mode 100644
index 0000000..f5ba3ac
--- /dev/null
+++ b/app/client/audio/lib/draw.js
@@ -0,0 +1,134 @@
+// Assumed wiring (hedged): the canvas helpers come from _draw.js, and the
+// shared util/color modules sit one level up, matching index.js's '../util'.
+import { clamp, mod } from '../util'
+import color from '../color'
+import draw from './_draw'
+
+const { canvas, ctx, clear } = draw
+const pixels_per_second = 512 // keep in sync with _draw.js
+const w = canvas.width // canvas dimensions captured at load time
+const h = canvas.height
+
+const scratch = document.createElement('canvas')
+const scratchCtx = scratch.getContext('2d-lodpi') // low-DPI context via the vendored polyfill
+
+function spectrum(spec, x0, y0, ww, hh){
+ const data = spec.data
+ const fft_size = spec.fft_size
+ const half_fft_size = spec.fft_size / 2
+ const spec_len = data.length
+
+ scratch.width = data.length
+ scratch.height = half_fft_size
+
+ var imageData = scratchCtx.createImageData(scratch.width, scratch.height)
+ var pixels = imageData.data
+
+ let i, j, u, v, _r, _i, col, hsl
+
+ for (i = 0; i < spec_len; i++) {
+ col = data[i]
+
+ for (j = 0; j < half_fft_size; j++) {
+ // flip vertically so bin 0 lands on the bottom row; the -1 keeps the row in range
+ u = ((half_fft_size - 1 - j) * spec_len + i) * 4
+ v = j * 2
+ _r = col[v]
+ _i = mod(col[v+1], Math.PI*2) / (Math.PI*2)
+ hsl = color.hsl2rgb((_i + 1) / 2, 1.0, 1 - Math.abs(_r / 10))
+ // red - real part
+ // pixels[u] = _r * 127 + 127
+ // // green - imag part
+ // pixels[u+1] = _i * 127 + 127
+ // // blue - magnitude
+ // pixels[u+2] = Math.sqrt(Math.pow(_r, 2) + Math.pow(_i, 2)) * 128 + 127
+ // pixels[u+3] = 255
+ pixels[u] = hsl[0]
+ pixels[u+1] = hsl[1]
+ pixels[u+2] = hsl[2]
+ pixels[u+3] = 255
+ }
+ }
+
+ scratchCtx.putImageData(imageData, 0, 0)
+
+ var pcm_length = spec.fft_overlap * spec_len
+
+ x0 = x0 * devicePixelRatio || 0
+ y0 = y0 * devicePixelRatio || Math.floor(h/4)
+ ww = ww * devicePixelRatio || w
+ hh = hh * devicePixelRatio || h/4
+
+ const width = Math.round(pcm_length / spec.sr * pixels_per_second)
+ const height = Math.floor(hh)
+
+ ctx.save()
+ clear(1, x0, y0, w, height)
+ ctx.drawImage(scratch, x0, y0, width, height)
+ ctx.restore()
+}
+
+function raw_spectrum(spec, x0, y0, ww, hh, def_min_r, def_min_i){
+ const data = spec.data
+ const fft_size = spec.fft_size
+ const half_fft_size = spec.fft_size / 2
+ const spec_len = data.length
+
+ const _scratch = document.createElement('canvas')
+ const _scratchCtx = _scratch.getContext('2d-lodpi')
+ _scratch.width = data.length
+ _scratch.height = half_fft_size
+ // console.log("spectrum w/h:", _scratch.width, _scratch.height)
+
+ var imageData = _scratchCtx.createImageData(_scratch.width, _scratch.height)
+ var pixels = imageData.data
+
+ let i, j, u, v, _r, _i, col, hsl
+ // let min_r = Infinity, max_r = -Infinity
+ // let min_i = Infinity, max_i = -Infinity
+
+ // determined empirically..
+ // let min_r = -60.4894057005308
+ // let max_r = 107.23800966675353
+ // let min_i = -59.4894057005308
+ // let max_i = 108.23800966675353
+ let min_r = -def_min_r
+ let max_r = def_min_r
+ let min_i = -def_min_i
+ let max_i = def_min_i
+ let delta_r = max_r - min_r
+ let delta_i = max_i - min_i
+ let mean_r = 0
+ let mean_i = 0
+ let sum_mean_r = 0, sum_mean_i = 0
+ let real, imag
+
+ for (i = 0; i < spec_len; i++) {
+ col = data[i]
+ mean_r = 0
+ mean_i = 0
+
+ for (j = 0; j < half_fft_size; j++) {
+ u = (j * spec_len + i) * 4
+ v = j * 2
+ real = col[v]
+ imag = col[v+1]
+ mean_r += real
+ mean_i += imag
+ _r = clamp((real - min_r) / delta_r * 255, 0, 255)
+ _i = clamp((imag - min_i) / delta_i * 255, 0, 255)
+
+ // hsl = color.hsl2rgb((_i + 1) / 2, 1.0, 1 - Math.abs(_r / 10))
+ pixels[u+0] = _r
+ pixels[u+1] = _i
+ pixels[u+2] = 127 // hsl[2]
+ pixels[u+3] = 255
+
+ // min_r = Math.min(min_r, col[v])
+ // max_r = Math.max(max_r, col[v])
+ // min_i = Math.min(min_i, col[v]+1)
+ // max_i = Math.max(max_i, col[v]+1)
+ }
+ mean_r /= half_fft_size
+ mean_i /= half_fft_size
+ sum_mean_r += mean_r
+ sum_mean_i += mean_i
+ }
+
+ sum_mean_r /= spec_len
+ sum_mean_i /= spec_len
+ // console.log(sum_mean_r, sum_mean_i)
+ // console.log("r:", min_r, max_r)
+ // console.log("i:", min_i, max_i)
+ _scratchCtx.putImageData(imageData, 0, 0)
+
+ return { canvas: _scratch, imageData }
+}
+
+export default { spectrum, raw_spectrum }
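+
+// Image round-trip sketch (assumption: pass 2 * def_min as the _r/_i scale so
+// that fromImageData in ./spectrum undoes raw_spectrum's pixel mapping):
+//   const { imageData } = raw_spectrum(spec, 0, 0, w, h, 60, 60)
+//   const spec2 = spectrum.fromImageData(imageData, spec.sr, 120, 120)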
diff --git a/app/client/audio/lib/index.js b/app/client/audio/lib/index.js
new file mode 100644
index 0000000..ba96112
--- /dev/null
+++ b/app/client/audio/lib/index.js
@@ -0,0 +1,47 @@
+import Tone from 'tone'
+import StartAudioContext from './startAudioContext'
+
+import { is_mobile } from '../util'
+
+export function requestAudioContext (fn) {
+ if (is_mobile) {
+ const container = document.createElement('div')
+ const button = document.createElement('div')
+ button.innerHTML = 'Tap to start - please unmute your phone'
+ Object.assign(container.style, {
+ display: 'block',
+ position: 'absolute',
+ width: '100%',
+ height: '100%',
+ zIndex: '10000',
+ top: '0px',
+ left: '0px',
+ backgroundColor: 'rgba(0, 0, 0, 0.8)',
+ })
+ Object.assign(button.style, {
+ display: 'block',
+ position: 'absolute',
+ left: '50%',
+ top: '50%',
+ padding: '20px',
+ backgroundColor: '#7F33ED',
+ color: 'white',
+ fontFamily: 'monospace',
+ borderRadius: '3px',
+ transform: 'translate3D(-50%,-50%,0)',
+ textAlign: 'center',
+ lineHeight: '1.5',
+ width: '150px',
+ })
+ container.appendChild(button)
+ document.body.appendChild(container)
+ StartAudioContext.setContext(Tone.context)
+ StartAudioContext.on(button)
+ StartAudioContext.onStarted(_ => {
+ container.remove()
+ fn()
+ })
+ } else {
+ fn()
+ }
+}
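+
+// Usage sketch: gate any Tone.js scheduling behind the unlocked context.
+//   requestAudioContext(() => { /* safe to start Tone players here */ })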
diff --git a/app/client/audio/lib/output.js b/app/client/audio/lib/output.js
new file mode 100644
index 0000000..53901b3
--- /dev/null
+++ b/app/client/audio/lib/output.js
@@ -0,0 +1,8 @@
+import Tone from 'tone'
+
+const compressor = new Tone.Compressor(-30, 3).toMaster()
+const gain = new Tone.Gain(1).connect(compressor)
+
+export default gain
diff --git a/app/client/audio/lib/spectrum.js b/app/client/audio/lib/spectrum.js
new file mode 100644
index 0000000..f4a5444
--- /dev/null
+++ b/app/client/audio/lib/spectrum.js
@@ -0,0 +1,278 @@
+import Tone from 'tone'
+
+import { shuffle, quantize, mod } from '../util'
+
+import { windows as signalWindows } from 'signal-windows'
+import FFTJS from 'fft.js'
+
+const fft_size = 512
+const fft_overlap = fft_size / 4
+
+const fft = new FFTJS(fft_size)
+
+function toSpectrum(pcm, sr){
+ sr = sr || 44100
+ const ham = signalWindows.construct('ham', fft_size)
+ const pcm_in = new Array(fft_size)
+ const pcm_length = pcm.length
+ const pcm_q_length = Math.ceil(pcm_length / fft_size) * fft_size
+ let i, j, fft_out, data = [];
+ for (i = -fft_size; i < pcm_q_length; i += fft_overlap) {
+ for (j = 0; j < fft_size; j++) {
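+ // out-of-range reads at the edges yield NaN; `|| 0` zero-pads them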
+ pcm_in[j] = pcm[i+j] * ham[j] || 0
+ }
+ fft_out = fft.createComplexArray()
+ fft.realTransform(fft_out, pcm_in)
+ fft.completeSpectrum(fft_out)
+ data.push(fft_out)
+ }
+ return {
+ data,
+ sr,
+ fft_size,
+ fft_overlap,
+ }
+}
+
+function fromSpectrum(spec){
+ const data = spec.data
+ const sr = spec.sr
+ const fft_size = spec.fft_size
+ const fft_overlap = spec.fft_overlap
+ const spec_len = data.length
+
+ const ham = signalWindows.construct('ham', fft_size)
+ const out = fft.createComplexArray()
+ const pcm_length = fft_overlap * spec_len
+
+ const audioBuffer = Tone.context.createBuffer(1, pcm_length, sr)
+ const pcm = audioBuffer.getChannelData(0);
+
+ let i, j, u, col
+
+ for (i = 0; i < spec_len; i++) {
+ col = data[i]
+ // for (j = fft_size; j < fft_size << 1; j++) {
+ // col[j] = 0
+ // }
+ // if (i == 0) console.log(col)
+ fft.inverseTransform(out, col)
+ u = i * (fft_overlap)
+ for (j = 0; j < fft_size; j++) {
+ pcm[u+j] += out[j*2] * ham[j] || 0
+ }
+ }
+
+ fadeInOut(pcm, fft_size)
+ // console.log(pcm)
+ return audioBuffer
+}
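+
+// Round-trip sketch (assumption: `pcm` is a Float32Array of mono samples):
+//   const spec = toSpectrum(pcm, 44100)
+//   const buffer = fromSpectrum(spec) // AudioBuffer, ready for playback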
+
+function fromImageData(imageData, sr, _r, _i) {
+ const pixels = imageData.data
+ const w = imageData.width // time slices (spectrum columns)
+ const h = imageData.height // frequency bins (fft_size / 2)
+ let data = new Array(w)
+ let x, y, u, v, v2
+ for (x = 0; x < w; x++) {
+ let col = data[x] = new Float32Array(h * 4) // interleaved complex, fft_size bins
+ for (y = 0; y < h; y++) {
+ u = (y * w + x) * 4 // pixel at column x, row y, matching raw_spectrum's layout
+ v = y * 2
+ col[v] = (pixels[u] / 255 - 0.5) * _r // red channel -> real part
+ col[v+1] = (pixels[u+1] / 255 - 0.5) * _i // green channel -> imag part
+ if (y > 0) {
+ v2 = (2*h - y) * 2 // conjugate-mirrored bin in the upper half
+ col[v2] = col[v]
+ col[v2+1] = 0 // col[v+1]
+ }
+ }
+ col[h*2] = col[h*2+1] = col[h*2-1] = col[h*2-2] = 0 // zero the Nyquist region
+ }
+ const spec = {
+ data,
+ sr,
+ fft_size, fft_overlap
+ }
+ return spec
+}
+
+function binToHz(spec, i){
+ return (i / spec.fft_size) * spec.sr
+}
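+// e.g. bin 1 of a 512-point FFT at 44100 Hz is 44100 / 512 ≈ 86.1 Hz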
+
+function fadeInOut(pcm, fade_size){
+ const pcm_length = pcm.length
+ let fade = 0, i
+ for (i = 0; i < fade_size; i++) {
+ fade = i / (fade_size)
+ fade *= fade
+ pcm[i] *= fade
+ pcm[pcm_length - 1 - i] *= fade // -1 keeps the index in range
+ }
+}
+function rotatePhase(spec, theta){
+ let { data, fft_size } = spec
+ let i, j, col, len = data.length
+ for (i = 0; i < len; i++) {
+ col = data[i]
+ for (j = 0; j < fft_size; j++) {
+ col[j*2+1] += theta
+ }
+ }
+ return spec
+}
+
+function linearBins(spec, n){
+ n = n || 1
+
+ let bins = [], i, q_i
+ for (q_i = 0; q_i < n; q_i++) {
+ bins[q_i] = []
+ }
+ const step = Math.floor(spec.fft_size / n)
+ const len_quantize_n = quantize(spec.fft_size, n)
+ for (i = 0; i < len_quantize_n; i++) {
+ q_i = Math.floor(i/step)
+ bins[q_i] = bins[q_i] || []
+ bins[q_i].push(i)
+ }
+ // leftover bins get put at end
+ for (; i < spec.fft_size; i++) {
+ bins[q_i].push(i)
+ }
+ return bins
+}
+function logarithmicBins(spec){
+ let bins = [], i, j, q_i
+ let binCount = Math.log2(spec.fft_size) - 1
+ for (i = 0, q_i = 0, j = 0; i < binCount; i++) {
+ j += 1 << i
+ bins[i] = []
+ for (; q_i < j; q_i++) {
+ bins[i].push(q_i)
+ }
+ }
+ return bins
+}
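+// e.g. with fft_size = 512 this yields 8 octave-style bins:
+// [0], [1,2], [3..6], [7..14], ..., [127..254]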
+function concatBins(bins){
+ return bins.reduce((acc, cv) => acc.concat(cv), [])
+}
+function reverseBins(bins){
+ return bins.map( bin => bin.reverse() )
+}
+function minBins(bins){
+ return bins.map( bin => {
+ const b = bin[0]
+ return bin.map(() => b)
+ })
+}
+function maxBins(bins){
+ return bins.map( bin => {
+ const b = bin[bin.length-1]
+ return bin.map(() => b)
+ })
+}
+function rotateSpectrum(spec, n){
+ const { fft_size } = spec
+ if (n && n < 1) {
+ n -= 0.5
+ n *= fft_size
+ }
+ n = Math.floor(n)
+ let order = new Array(fft_size), i
+ for (i = 0; i < fft_size; i++) {
+ order[i] = mod(i + n, fft_size/2)
+ }
+ return reorderBins(spec, order)
+}
+function cloneSpectrum(spec){
+ const {
+ data,
+ fft_size,
+ sr, fft_overlap
+ } = spec
+ const spec_len = data.length
+
+ let new_data = new Array(spec_len)
+ let i
+ for (i = 0; i < spec_len; i++) {
+ new_data[i] = data[i].slice() // slice() copies both plain and typed arrays
+ new_data[i][2] = 0
+ }
+
+ return {
+ data: new_data,
+ fft_size,
+ sr, fft_overlap,
+ }
+}
+function reverseSpectrum(spec){
+ let new_spec = cloneSpectrum(spec)
+ new_spec.data = new_spec.data.reverse()
+ return new_spec
+}
+function shuffleSpectrum(spec){
+ const { fft_size } = spec
+ let order = new Array(fft_size), i
+ for (i = 0; i < fft_size; i++) {
+ order[i] = i
+ }
+ shuffle(order)
+ return reorderBins(spec, order)
+}
+function invertSpectrum(spec){
+ const { fft_size } = spec
+ let order = new Array(fft_size), i
+ for (i = 0; i < fft_size; i++) {
+ order[i] = fft_size - i - 1
+ }
+ return reorderBins(spec, order)
+}
+function reorderBins(spec, order){
+ let new_spec = cloneSpectrum(spec)
+ const {
+ data,
+ sr,
+ fft_size,
+ fft_overlap,
+ } = spec
+ const spec_len = data.length
+ const { data: new_data } = new_spec
+
+ let i, j, col, new_col
+ for (j = order.length; j < fft_size; j++) {
+ order[j] = j
+ }
+
+ for (i = 0; i < spec_len; i++) {
+ col = data[i]
+ new_col = new_data[i] = data[i].slice() // slice() also handles Float32Array columns
+ // note: zeroes the DC and lowest bins of the source column before copying
+ col[0] = 0
+ col[2] = 0
+ col[4] = 0
+ for (j = 0; j < fft_size/2; j++) {
+ new_col[j*2] = col[order[j]*2]
+ new_col[j*2+1] = col[order[j]*2+1]
+ }
+ for (; j < fft_size; j++) {
+ new_col[j*2] = 0
+ new_col[j*2+1] = 0
+ }
+ }
+
+ return {
+ data: new_data,
+ sr, fft_size, fft_overlap,
+ }
+}
+
+export default {
+ toSpectrum, fromSpectrum, fromImageData, binToHz,
+ fadeInOut,
+ cloneSpectrum,
+ reverseSpectrum, shuffleSpectrum, invertSpectrum, rotateSpectrum,
+ reorderBins,
+ linearBins, logarithmicBins,
+ concatBins,
+ reverseBins, minBins, maxBins,
+ rotatePhase,
+}
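+
+// Transform-chain sketch (assumes `pcm` is a Float32Array of samples):
+//   const spec = toSpectrum(pcm)
+//   const order = concatBins(reverseBins(linearBins(spec, 16)))
+//   const buffer = fromSpectrum(reorderBins(spec, order)) // scrambled audio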
diff --git a/app/client/audio/lib/startAudioContext.js b/app/client/audio/lib/startAudioContext.js
new file mode 100644
index 0000000..0e257be
--- /dev/null
+++ b/app/client/audio/lib/startAudioContext.js
@@ -0,0 +1,179 @@
+/**
+ * StartAudioContext.js
+ * @author Yotam Mann
+ * @license http://opensource.org/licenses/MIT MIT License
+ * @copyright 2016 Yotam Mann
+ */
+(function (root, factory) {
+ if (typeof define === "function" && define.amd) {
+ define([], factory);
+ } else if (typeof module === 'object' && module.exports) {
+ module.exports = factory();
+ } else {
+ root.StartAudioContext = factory();
+ }
+}(this, function () {
+
+ /**
+ * The StartAudioContext object
+ */
+ var StartAudioContext = {
+ /**
+ * The audio context passed in by the user
+ * @type {AudioContext}
+ */
+ context : null,
+ /**
+ * The TapListeners bound to the elements
+ * @type {Array}
+ * @private
+ */
+ _tapListeners : [],
+ /**
+ * Callbacks to invoke when the audio context is started
+ * @type {Array}
+ * @private
+ */
+ _onStarted : [],
+ };
+
+
+ /**
+ * Set the context
+ * @param {AudioContext} ctx
+ * @returns {StartAudioContext}
+ */
+ StartAudioContext.setContext = function(ctx){
+ StartAudioContext.context = ctx;
+ return StartAudioContext;
+ };
+
+ /**
+ * Add a tap listener to the audio context
+ * @param {Array|Element|String|jQuery} element
+ * @returns {StartAudioContext}
+ */
+ StartAudioContext.on = function(element){
+ if (Array.isArray(element) || (NodeList && element instanceof NodeList)){
+ for (var i = 0; i < element.length; i++){
+ StartAudioContext.on(element[i]);
+ }
+ } else if (typeof element === "string"){
+ StartAudioContext.on(document.querySelectorAll(element));
+ } else if (element.jquery && typeof element.toArray === "function"){
+ StartAudioContext.on(element.toArray());
+ } else if (Element && element instanceof Element){
+ //if it's an element, create a TapListener
+ var tap = new TapListener(element, onTap);
+ StartAudioContext._tapListeners.push(tap);
+ }
+ return StartAudioContext;
+ };
+
+ /**
+ * Bind a callback to when the audio context is started.
+ * @param {Function} cb
+ * @return {StartAudioContext}
+ */
+ StartAudioContext.onStarted = function(cb){
+ //if it's already started, invoke the callback
+ if (StartAudioContext.isStarted()){
+ cb();
+ } else {
+ StartAudioContext._onStarted.push(cb);
+ }
+ return StartAudioContext;
+ };
+
+ /**
+ * returns true if the context is started
+ * @return {Boolean}
+ */
+ StartAudioContext.isStarted = function(){
+ return (StartAudioContext.context !== null && StartAudioContext.context.state === "running");
+ };
+
+ /**
+ * @class Listens for non-dragging tap ends on the given element
+ * @param {Element} element
+ * @internal
+ */
+ var TapListener = function(element){
+
+ this._dragged = false;
+
+ this._element = element;
+
+ this._bindedMove = this._moved.bind(this);
+ this._bindedEnd = this._ended.bind(this);
+
+ element.addEventListener("touchmove", this._bindedMove);
+ element.addEventListener("touchend", this._bindedEnd);
+ element.addEventListener("mouseup", this._bindedEnd);
+ };
+
+ /**
+ * drag move event
+ */
+ TapListener.prototype._moved = function(e){
+ this._dragged = true;
+ };
+
+ /**
+ * tap ended listener
+ */
+ TapListener.prototype._ended = function(e){
+ if (!this._dragged){
+ onTap();
+ }
+ this._dragged = false;
+ };
+
+ /**
+ * remove all the bound events
+ */
+ TapListener.prototype.dispose = function(){
+ this._element.removeEventListener("touchmove", this._bindedMove);
+ this._element.removeEventListener("touchend", this._bindedEnd);
+ this._element.removeEventListener("mouseup", this._bindedEnd);
+ this._bindedMove = null;
+ this._bindedEnd = null;
+ this._element = null;
+ };
+
+ /**
+ * Invoked the first time one of the elements is tapped.
+ * Creates a silent oscillator when a non-dragging touchend
+ * event has been triggered.
+ */
+ function onTap(){
+ //start the audio context with a silent oscillator
+ if (StartAudioContext.context && !StartAudioContext.isStarted()){
+ var osc = StartAudioContext.context.createOscillator();
+ var silent = StartAudioContext.context.createGain();
+ silent.gain.value = 0;
+ osc.connect(silent);
+ silent.connect(StartAudioContext.context.destination);
+ var now = StartAudioContext.context.currentTime;
+ osc.start(now);
+ osc.stop(now+0.5);
+ }
+
+ //dispose all the tap listeners
+ if (StartAudioContext._tapListeners){
+ for (var i = 0; i < StartAudioContext._tapListeners.length; i++){
+ StartAudioContext._tapListeners[i].dispose();
+ }
+ StartAudioContext._tapListeners = null;
+ }
+ //the onstarted callbacks
+ if (StartAudioContext._onStarted){
+ for (var j = 0; j < StartAudioContext._onStarted.length; j++){
+ StartAudioContext._onStarted[j]();
+ }
+ StartAudioContext._onStarted = null;
+ }
+ }
+
+ return StartAudioContext;
+}));